Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -22,9 +22,9 @@ from alphabet_sign_language_detection import sign_language_classification
 from rice_leaf_disease import classify_leaf_disease
 from traffic_density import traffic_density_classification
 from clip_art import clipart_classification
+from multisource_121 import multisource_classification  # New import
 
-#Gradio-Theme
-
+# Gradio-Theme
 class Seafoam(Base):
     def __init__(
         self,
@@ -93,8 +93,10 @@ def classify(image, model_name):
         return sign_language_classification(image)
     elif model_name == "traffic density":
         return traffic_density_classification(image)
-    elif model_name == "clip art":
+    elif model_name == "clip art":
         return clipart_classification(image)
+    elif model_name == "multisource":
+        return multisource_classification(image)
     else:
         return {"Error": "No model selected"}
 
@@ -104,13 +106,13 @@ def select_model(model_name):
         "gender": "secondary", "emotion": "secondary", "dog breed": "secondary", "deepfake": "secondary",
         "gym workout": "secondary", "waste": "secondary", "age": "secondary", "mnist": "secondary",
         "fashion_mnist": "secondary", "food": "secondary", "bird": "secondary", "leaf disease": "secondary",
-        "sign language": "secondary", "traffic density": "secondary", "clip art": "secondary"
+        "sign language": "secondary", "traffic density": "secondary", "clip art": "secondary",
+        "multisource": "secondary"  # New model variant
     }
     model_variants[model_name] = "primary"
     return (model_name, *(gr.update(variant=model_variants[key]) for key in model_variants))
 
 # Zero-Shot Classification Setup (SigLIP models)
-
 # Load the SigLIP models and processors
 sg1_ckpt = "google/siglip-so400m-patch14-384"
 siglip1_model = AutoModel.from_pretrained(sg1_ckpt, device_map="cpu").eval()
@@ -172,7 +174,8 @@ with gr.Blocks(theme=seafoam) as demo:
         leaf_disease_btn = gr.Button("Rice Leaf Disease", variant="secondary")
         sign_language_btn = gr.Button("Alphabet Sign Language", variant="secondary")
         traffic_density_btn = gr.Button("Traffic Density", variant="secondary")
-        clip_art_btn = gr.Button("Art Classification", variant="secondary")
+        clip_art_btn = gr.Button("Art Classification", variant="secondary")
+        multisource_btn = gr.Button("Multi-Source Classification", variant="secondary")  # New button
 
         selected_model = gr.State("age")
         gr.Markdown("### Current Model:")
@@ -182,12 +185,12 @@ with gr.Blocks(theme=seafoam) as demo:
         buttons = [
             gender_btn, emotion_btn, dog_breed_btn, deepfake_btn, gym_workout_btn, waste_btn,
             age_btn, mnist_btn, fashion_mnist_btn, food_btn, bird_btn, leaf_disease_btn,
-            sign_language_btn, traffic_density_btn, clip_art_btn  # Include new button
+            sign_language_btn, traffic_density_btn, clip_art_btn, multisource_btn  # Include new button
         ]
         model_names = [
            "gender", "emotion", "dog breed", "deepfake", "gym workout", "waste",
            "age", "mnist", "fashion_mnist", "food", "bird", "leaf disease",
-           "sign language", "traffic density", "clip art"  # New model name
+           "sign language", "traffic density", "clip art", "multisource"  # New model name
         ]
 
         for btn, name in zip(buttons, model_names):
@@ -213,4 +216,4 @@ with gr.Blocks(theme=seafoam) as demo:
         siglip2_output = gr.Label(label="SigLIP 2 Output", num_top_classes=3)
         zs_run_button.click(fn=infer, inputs=[zs_image_input, zs_text_input], outputs=[siglip1_output, siglip2_output])
 
-demo.launch()
+demo.launch()
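For context, the commit only imports multisource_classification from multisource_121; that module is not part of this diff. Below is a minimal sketch of what such a helper could look like, assuming it follows the same pattern as the Space's other single-image classifiers: a fine-tuned image-classification checkpoint whose label-to-probability dict feeds a gr.Label. The checkpoint id and the input handling are assumptions, not the Space's actual code.

# Hypothetical sketch of multisource_121.py (not shown in this commit).
# The checkpoint id below is a placeholder; the real module may differ.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

ms_ckpt = "your-org/multisource-121"  # placeholder checkpoint id
ms_processor = AutoImageProcessor.from_pretrained(ms_ckpt)
ms_model = AutoModelForImageClassification.from_pretrained(ms_ckpt).eval()

def multisource_classification(image):
    """Return a {label: probability} dict that gr.Label can render."""
    # gr.Image may hand over a numpy array or a PIL image depending on type=.
    if not isinstance(image, Image.Image):
        image = Image.fromarray(image)
    inputs = ms_processor(images=image.convert("RGB"), return_tensors="pt")
    with torch.no_grad():
        logits = ms_model(**inputs).logits
    probs = torch.softmax(logits, dim=-1)[0]
    return {ms_model.config.id2label[i]: float(p) for i, p in enumerate(probs)}

With this output shape, the elif model_name == "multisource" branch added in classify() can hand the dict straight to the existing gr.Label result component.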
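The diff stops at the for btn, name in zip(buttons, model_names): header, so the loop body is not visible. The sketch below shows the usual Gradio pattern the visible pieces imply, given that select_model returns the chosen name followed by one gr.update(variant=...) per entry; binding each model name via functools.partial is my assumption about how every button submits its own name, and the real app may route the name to a display component rather than only to the state.

# Hypothetical loop body; the commit does not show it.
from functools import partial

for btn, name in zip(buttons, model_names):
    btn.click(
        fn=partial(select_model, name),      # each button submits its own model name
        inputs=None,
        outputs=[selected_model, *buttons],  # selected name plus one variant update per button
    )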
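The zero-shot block loads the SigLIP checkpoint and wires zs_run_button.click(fn=infer, ...), but infer itself falls outside the hunks. The sketch below assumes the textbox supplies comma-separated candidate labels and that a SigLIP 2 checkpoint is loaded as siglip2_model/siglip2_processor in the same way as sg1_ckpt; the helper and its names are illustrative only.

# Hypothetical sketch of the zero-shot path; only the model loading and the
# click wiring appear in the diff.
import torch
from transformers import AutoProcessor

siglip1_processor = AutoProcessor.from_pretrained(sg1_ckpt)  # sg1_ckpt as in the diff

def siglip_scores(model, processor, image, labels):
    # SigLIP is trained with a sigmoid loss, so per-label sigmoid scores are
    # the natural readout (rather than a softmax over the label set).
    inputs = processor(text=labels, images=image, padding="max_length", return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits_per_image[0]
    return {label: float(torch.sigmoid(score)) for label, score in zip(labels, logits)}

def infer(image, candidate_labels):
    labels = [l.strip() for l in candidate_labels.split(",") if l.strip()]
    out1 = siglip_scores(siglip1_model, siglip1_processor, image, labels)
    out2 = siglip_scores(siglip2_model, siglip2_processor, image, labels)  # SigLIP 2 pair, assumed loaded
    return out1, out2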