Update app.py
app.py CHANGED
@@ -43,21 +43,13 @@ def infer(prompt, model_size, seed=42, randomize_seed=False, width=1024, height=
     print(img)
     return img.images[0], seed
 
-examples_06B = [
-
-    "a
-    "
-    "a
+examples = [
+    ["a tiny astronaut hatching from an egg on the moon", "0.6B"],
+    ["a cat holding a sign that says hello world", "1.6B"],
+    ["an anime illustration of a wiener schnitzel", "0.6B"],
+    ["a photorealistic landscape of mountains at sunset", "1.6B"],
 ]
 
-examples_16B = [
-    "a steampunk city with airships in the sky",
-    "a photorealistic fox in a snowy landscape",
-    "an underwater temple with ancient ruins"
-]
-
-# We'll use the appropriate set based on the model selection
-
 css="""
 #col-container {
     margin: 0 auto;
@@ -140,24 +132,13 @@ with gr.Blocks(css=css) as demo:
                 value=2,
             )
 
-
-
-
-
-
-
-
-            label="Example Prompts"
-        )
-
-        # Update examples when model size changes
-        def update_examples(model_choice):
-            if model_choice == "0.6B":
-                return gr.Examples.update(examples=examples_06B)
-            else:
-                return gr.Examples.update(examples=examples_16B)
-
-        model_size.change(fn=update_examples, inputs=[model_size], outputs=[examples_container])
+        gr.Examples(
+            examples = examples,
+            fn = infer,
+            inputs = [prompt, model_size], # Add model_size to inputs
+            outputs = [result, seed],
+            cache_examples="lazy"
+        )
 
     gr.on(
         triggers=[run_button.click, prompt.submit],
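
For context, the pattern the new code relies on is that a single gr.Examples component can fill several inputs at once: each example row supplies both the prompt and the model size, so the per-model example lists and the model_size.change() callback become unnecessary. Below is a minimal, self-contained sketch of that wiring under stated assumptions: the stub infer() only echoes its arguments (the real app calls a diffusion pipeline), the component names mirror the diff, and cache_examples="lazy" assumes a recent Gradio 4.x release.

```python
# Minimal sketch: one gr.Examples component feeding both prompt and model_size.
# The infer() stub is illustrative only; the real Space runs an image pipeline.
import random

import gradio as gr


def infer(prompt, model_size, seed=42, randomize_seed=False):
    if randomize_seed:
        seed = random.randint(0, 2**32 - 1)
    # Placeholder result: the real implementation would generate an image here.
    return f"[{model_size}] would generate: {prompt} (seed={seed})", seed


examples = [
    ["a tiny astronaut hatching from an egg on the moon", "0.6B"],
    ["a cat holding a sign that says hello world", "1.6B"],
]

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    model_size = gr.Radio(["0.6B", "1.6B"], value="1.6B", label="Model size")
    result = gr.Textbox(label="Result")
    seed = gr.Number(label="Seed", value=42)

    # Each example row populates every component listed in `inputs`, so no
    # model_size.change() handler is needed; cache_examples="lazy" computes and
    # caches the output the first time a row is clicked.
    gr.Examples(
        examples=examples,
        fn=infer,
        inputs=[prompt, model_size],
        outputs=[result, seed],
        cache_examples="lazy",
    )

    run_button = gr.Button("Run")
    run_button.click(fn=infer, inputs=[prompt, model_size], outputs=[result, seed])

if __name__ == "__main__":
    demo.launch()
```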