multimodalart HF Staff committed on
Commit 5b6d50e · verified · 1 Parent(s): c7c8e9e

Update app.py

Files changed (1)
  1. app.py +12 -31
app.py CHANGED
@@ -43,21 +43,13 @@ def infer(prompt, model_size, seed=42, randomize_seed=False, width=1024, height=
     print(img)
     return img.images[0], seed
 
-# Different examples for each model size
-examples_06B = [
-    "a majestic castle on a floating island",
-    "a robotic chef cooking in a futuristic kitchen",
-    "a magical forest with glowing mushrooms"
+examples = [
+    ["a tiny astronaut hatching from an egg on the moon", "0.6B"],
+    ["a cat holding a sign that says hello world", "1.6B"],
+    ["an anime illustration of a wiener schnitzel", "0.6B"],
+    ["a photorealistic landscape of mountains at sunset", "1.6B"],
 ]
 
-examples_16B = [
-    "a steampunk city with airships in the sky",
-    "a photorealistic fox in a snowy landscape",
-    "an underwater temple with ancient ruins"
-]
-
-# We'll use the appropriate set based on the model selection
-
 css="""
 #col-container {
     margin: 0 auto;
@@ -140,24 +132,13 @@ with gr.Blocks(css=css) as demo:
                 value=2,
             )
 
-        with gr.Row():
-            examples_container = gr.Examples(
-                examples = examples_06B,  # Start with 0.6B examples
-                fn = infer,
-                inputs = [prompt, model_size],
-                outputs = [result, seed],
-                cache_examples="lazy",
-                label="Example Prompts"
-            )
-
-        # Update examples when model size changes
-        def update_examples(model_choice):
-            if model_choice == "0.6B":
-                return gr.Examples.update(examples=examples_06B)
-            else:
-                return gr.Examples.update(examples=examples_16B)
-
-        model_size.change(fn=update_examples, inputs=[model_size], outputs=[examples_container])
+        gr.Examples(
+            examples = examples,
+            fn = infer,
+            inputs = [prompt, model_size],  # Add model_size to inputs
+            outputs = [result, seed],
+            cache_examples="lazy"
+        )
 
     gr.on(
         triggers=[run_button.click, prompt.submit],
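
For reference, here is a minimal standalone sketch of the pattern the new code uses: a single gr.Examples block whose rows fill both the prompt and the model selector, so no example-swapping callback is needed when the model size changes. The component types and the simplified infer() stub below are assumptions for illustration, not the actual contents of app.py.

import gradio as gr

def infer(prompt, model_size, seed=42):
    # Hypothetical, simplified stub: the real app runs the selected
    # pipeline and returns (image, seed).
    return None, seed

examples = [
    ["a tiny astronaut hatching from an egg on the moon", "0.6B"],
    ["a cat holding a sign that says hello world", "1.6B"],
]

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    model_size = gr.Radio(["0.6B", "1.6B"], value="0.6B", label="Model size")
    result = gr.Image(label="Result")
    seed = gr.Number(value=42, label="Seed")

    # Each example row populates both inputs, so a model_size.change()
    # handler that swaps example sets is unnecessary.
    gr.Examples(
        examples=examples,
        fn=infer,
        inputs=[prompt, model_size],
        outputs=[result, seed],
        cache_examples="lazy",
    )

if __name__ == "__main__":
    demo.launch()

Clicking an example fills both fields; with cache_examples="lazy", infer() runs only the first time a given row is selected and the cached output is reused afterwards.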