# Callisto-OCR-2B / app.py
import gradio as gr
from gender_classification import gender_classification
from emotion_classification import emotion_classification
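# NOTE (assumption): gender_classification.py and emotion_classification.py are expected
# to live next to this app.py. Each is assumed to expose a function that takes the
# uploaded image (a numpy array here) and returns a {label: score} mapping that
# gr.Label can render, e.g. {"female": 0.91, "male": 0.09} (illustrative values only).
# gr.Sidebar and State change events also assume a reasonably recent Gradio release.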
# Functions to update the model state when a button is clicked.
def select_gender():
    return "gender"

def select_emotion():
    return "emotion"
# Main classification function that calls the appropriate model based on selection.
def classify(image, model_name):
    if model_name == "gender":
        return gender_classification(image)
    elif model_name == "emotion":
        return emotion_classification(image)
    else:
        # gr.Label accepts a plain string when there are no scores to show.
        return "No model selected"
with gr.Blocks() as demo:
    # Sidebar with title and model selection buttons.
    with gr.Sidebar():
        gr.Markdown("# SigLIP2 Classification")
        with gr.Row():
            gender_btn = gr.Button("Gender Classification")
            emotion_btn = gr.Button("Emotion Classification")

        # State to hold the current model choice.
        selected_model = gr.State("gender")

        # Set model state when buttons are clicked.
        gender_btn.click(fn=select_gender, inputs=[], outputs=selected_model)
        emotion_btn.click(fn=select_emotion, inputs=[], outputs=selected_model)

        gr.Markdown("### Current Model:")
        model_display = gr.Textbox(value="gender", interactive=False)

        # Update display when state changes.
        selected_model.change(lambda m: m, selected_model, model_display)
    # Main interface: image input, analyze button, and prediction output.
    with gr.Column():
        image_input = gr.Image(type="numpy", label="Upload Image")
        analyze_btn = gr.Button("Analyze")
        output_label = gr.Label(label="Prediction Scores")

        # When the "Analyze" button is clicked, use the selected model to classify the image.
        analyze_btn.click(fn=classify, inputs=[image_input, selected_model], outputs=output_label)
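        # gr.Label renders a returned {label: score} dict as ranked confidence bars;
        # a plain string (the fallback in classify) is shown as a single label instead.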
demo.launch()