Update utils.py
utils.py
CHANGED
@@ -47,18 +47,19 @@ def get_similar_docs(query, k=2, score=False):
 
 # model = AutoModelForCausalLM.from_pretrained("gpt2")
 # tokenizer = AutoTokenizer.from_pretrained("gpt2")
-
-
-
-
-
-
-
-
-
-
-
-
+tokenizer = AutoTokenizer.from_pretrained("TinyLlama/TinyLlama_v1.1")
+model = AutoModelForCausalLM.from_pretrained("TinyLlama/TinyLlama_v1.1")
+text_generation_pipeline = pipeline(
+    model=model,
+    tokenizer=tokenizer,
+    task="text-generation",
+    temperature=0.2,
+    do_sample=True,
+    repetition_penalty=1.1,
+    return_full_text=True,
+    max_new_tokens=400,
+)
+# text_generation_pipeline = pipeline("text-generation", model="bigscience/bloom-1b7")
 
 llm = HuggingFacePipeline(pipeline=text_generation_pipeline)
 
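For context, here is a minimal, self-contained sketch of what the affected section of utils.py plausibly looks like after this commit. The body of the block is taken from the added lines above; the import statements are assumptions (the hunk does not show them), and the exact LangChain import path depends on the installed version.

# Sketch only, not part of the commit; imports are assumed, not shown in the diff.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain_community.llms import HuggingFacePipeline  # newer releases: langchain_huggingface

# Load the TinyLlama checkpoint introduced by this change.
tokenizer = AutoTokenizer.from_pretrained("TinyLlama/TinyLlama_v1.1")
model = AutoModelForCausalLM.from_pretrained("TinyLlama/TinyLlama_v1.1")

# Build a text-generation pipeline with the sampling settings from the diff.
text_generation_pipeline = pipeline(
    task="text-generation",
    model=model,
    tokenizer=tokenizer,
    temperature=0.2,
    do_sample=True,
    repetition_penalty=1.1,
    return_full_text=True,
    max_new_tokens=400,
)

# Wrap the pipeline so it can be used as a LangChain LLM.
llm = HuggingFacePipeline(pipeline=text_generation_pipeline)

Note that before this commit the removed lines were blank, so text_generation_pipeline was never defined ahead of the HuggingFacePipeline wrapper; defining the pipeline first is the substance of the change.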