Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -12,6 +12,8 @@ model = MllamaForConditionalGeneration.from_pretrained(ckpt,
                                              torch_dtype=torch.bfloat16).to("cuda")
 processor = AutoProcessor.from_pretrained(ckpt)
 import requests
+import json
+
 
 @spaces.GPU
 def bot_streaming(message, history, max_new_tokens=250):
@@ -51,6 +53,7 @@ def bot_streaming(message, history, max_new_tokens=250):
 
 
     messages= message['text']
+    messages = json.loads(messages)
     files = message['files']
     for url in files:
         response = requests.get(url)
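In effect, the commit imports json and decodes message['text'] with json.loads before the rest of bot_streaming uses it, so the text field is now expected to arrive as a JSON-encoded string. Below is a minimal sketch of that parsing step in isolation; the parse_message helper and the sample payload are hypothetical and only mirror the keys visible in the diff ('text' and 'files'), not the Space's actual input format.

import json

def parse_message(message):
    # Hypothetical helper mirroring the changed lines in bot_streaming:
    # the 'text' field is treated as a JSON-encoded string and decoded,
    # while 'files' is passed through unchanged.
    messages = json.loads(message['text'])
    files = message['files']
    return messages, files

# Hypothetical payload: a JSON-encoded chat turn in 'text' and no files.
example = {
    "text": '[{"role": "user", "content": "Describe this image."}]',
    "files": [],
}
msgs, files = parse_message(example)
print(msgs[0]["content"])  # -> Describe this image.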