add streaming
app.py CHANGED
@@ -61,18 +61,24 @@ def chat(message, history):
         # Add user message
         messages.append({"role": "user", "content": message})
 
-        # Get response from the model
-        response = client.chat.completions.create(
+        # Get streaming response from the model
+        stream = client.chat.completions.create(
             model="deepseek-ai/DeepSeek-V3-0324",
             messages=messages,
             temperature=0.7,
-            max_tokens=1000
+            max_tokens=1000,
+            stream=True
         )
 
-        return response.choices[0].message.content
+        # Stream the response
+        partial_message = ""
+        for chunk in stream:
+            if chunk.choices[0].delta.content is not None:
+                partial_message += chunk.choices[0].delta.content
+                yield partial_message
 
     except Exception as e:
-        return f"Error: {str(e)}"
+        yield f"Error: {str(e)}"
 
 # Create Gradio interface
 with gr.Blocks(title="DeepSearch - AI Search Assistant") as demo:
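For context, the sketch below shows how a generator-based chat function like the one in this diff can drive a streaming Gradio chat UI: each yield hands Gradio a longer prefix of the reply, so the message grows in place as chunks arrive. The client setup, endpoint URL, system prompt, and use of gr.ChatInterface (instead of the Space's gr.Blocks layout) are assumptions for illustration, not the Space's actual app.py.

import os

import gradio as gr
from openai import OpenAI

# Assumed: an OpenAI-compatible endpoint; swap in whatever client the Space actually uses.
client = OpenAI(
    base_url="https://router.huggingface.co/v1",
    api_key=os.environ.get("HF_TOKEN"),
)


def chat(message, history):
    # Assumes Gradio's tuple-style history ([user, assistant] pairs);
    # pass type="messages" to ChatInterface to use role/content dicts instead.
    messages = [{"role": "system", "content": "You are a helpful search assistant."}]
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})

    # Add user message
    messages.append({"role": "user", "content": message})

    try:
        # Get streaming response from the model
        stream = client.chat.completions.create(
            model="deepseek-ai/DeepSeek-V3-0324",
            messages=messages,
            temperature=0.7,
            max_tokens=1000,
            stream=True,
        )

        # Yield the growing reply so Gradio re-renders it as chunks arrive
        partial_message = ""
        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                partial_message += chunk.choices[0].delta.content
                yield partial_message
    except Exception as e:
        yield f"Error: {str(e)}"


demo = gr.ChatInterface(chat, title="DeepSearch - AI Search Assistant")

if __name__ == "__main__":
    demo.launch()

Because chat is a generator, Gradio treats every yielded string as the current state of the assistant's message; returning a single string (as the pre-diff code did) would instead display the reply only once the full response had been received.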