import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# ✅ Choose a public model that is available on Hugging Face
# Note: the versioned repo ID is required; "mistralai/Mistral-7B-Instruct"
# (without a version suffix) is not a valid Hub model ID.
MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.2"  # Alternative: "microsoft/BioGPT-Large"

# ✅ Load the tokenizer and model
try:
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
except Exception as e:
    print(f"Error loading model: {e}")
    model = None  # Prevents crashing if the model doesn't load


def diagnose(symptoms):
    if model is None:
        return "⚠️ Error: AI model failed to load. Try again later."

    prompt = f"I have the following symptoms: {symptoms}. What could it be?"
    inputs = tokenizer(prompt, return_tensors="pt")

    # ✅ Generate AI response
    # max_new_tokens bounds only the generated text; max_length would also
    # count the prompt tokens and can silently truncate the answer.
    # Mistral's tokenizer has no pad token, so fall back to EOS.
    output = model.generate(
        **inputs,
        max_new_tokens=200,
        pad_token_id=tokenizer.eos_token_id,
    )
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    return response


# ✅ Create a simple web UI
interface = gr.Interface(
    fn=diagnose,
    inputs="text",
    outputs="text",
    title="AI Symptom Checker",
    description="Enter your symptoms, and the AI will suggest possible conditions.",
)

# ✅ Launch the web app
if __name__ == "__main__":
    interface.launch()
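

# --- Optional: prompt via the chat template -------------------------------
# A minimal sketch, not part of the original app. It assumes transformers
# >= 4.34 (where tokenizer.apply_chat_template was added) and a checkpoint
# that ships a chat template, which the Mistral instruct models do.
# Instruct-tuned models generally answer better when the question is wrapped
# in their expected chat format rather than passed as raw text.

def diagnose_chat(symptoms):
    if model is None:
        return "⚠️ Error: AI model failed to load. Try again later."

    messages = [
        {
            "role": "user",
            "content": f"I have the following symptoms: {symptoms}. What could it be?",
        }
    ]
    # Render the conversation into the model's expected prompt format.
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    )
    output = model.generate(
        input_ids,
        max_new_tokens=200,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode only the newly generated tokens, not the echoed prompt.
    return tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)

# To try it, define this function above the gr.Interface call and pass
# fn=diagnose_chat instead of fn=diagnose.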