Update app.py
app.py CHANGED

@@ -26,8 +26,8 @@ if not torch.cuda.is_available():
 base_model = "black-forest-labs/FLUX.1-dev"
 pipe = DiffusionPipeline.from_pretrained(base_model, torch_dtype=torch.bfloat16)
 
-lora_repo = "strangerzonehf/
-trigger_word = "
+lora_repo = "strangerzonehf/Flux-Claude-Art"
+trigger_word = "claude art"  # Leave trigger_word blank if not used.
 
 pipe.load_lora_weights(lora_repo)
 pipe.to("cuda")
@@ -82,7 +82,7 @@ def generate(
         width=width,
         height=height,
         guidance_scale=guidance_scale,
-        num_inference_steps=
+        num_inference_steps=30,
         num_images_per_prompt=1,
         output_type="pil",
     ).images
@@ -166,7 +166,7 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
             minimum=1,
             maximum=40,
             step=1,
-            value=
+            value=30,
         )
 
         style_selection = gr.Radio(
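For reference, a minimal standalone sketch of how the changed lines fit together. Only the base model, LoRA repo, trigger word, and the 30-step default come from this commit; the prompt, resolution, and guidance_scale below are illustrative assumptions, since the actual Space wires those to Gradio inputs.

import torch
from diffusers import DiffusionPipeline

base_model = "black-forest-labs/FLUX.1-dev"
pipe = DiffusionPipeline.from_pretrained(base_model, torch_dtype=torch.bfloat16)

lora_repo = "strangerzonehf/Flux-Claude-Art"
trigger_word = "claude art"  # prepended to prompts so the LoRA style is applied

pipe.load_lora_weights(lora_repo)
pipe.to("cuda")

# Illustrative call: prompt, width/height, and guidance_scale are placeholder values.
prompt = f"{trigger_word}, a lighthouse on a cliff at dusk"
images = pipe(
    prompt=prompt,
    width=1024,
    height=1024,
    guidance_scale=3.5,
    num_inference_steps=30,   # default introduced by this commit, matching the slider value
    num_images_per_prompt=1,
    output_type="pil",
).images
images[0].save("sample.png")

Setting the slider's value to 30 keeps the UI default in sync with the num_inference_steps default used in generate(), so the step count shown in the interface matches what actually runs unless the user moves the slider.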