attilasimko committed
Commit 8f731e0 · Parent: bba66e3

remote llm

Files changed (2)
  1. app.py +3 -3
  2. evaluations/models.py +2 -5
app.py CHANGED
@@ -1,9 +1,9 @@
  import streamlit as st
  from evaluations.repo_evaluations import evaluate
- from evaluations.models import LocalLLM
+ from evaluations.models import RemoteLLM
  import requests

- # model = LocalLLM("codellama/CodeLlama-7b-Instruct-hf")
+ model = RemoteLLM("codellama/CodeLlama-7b-Instruct-hf")

  st.write("\n")
  st.write("Welcome to the online reproducibility evaluation tool!")
@@ -14,7 +14,7 @@ repo_link = st.text_input("Github repository link:", value="", type="default", h

  if (repo_link):
      verbose = 4 if checkbox else 3
-     evaluate(llm=None, verbose=verbose, repo_url=repo_link)
+     evaluate(llm=model, verbose=verbose, repo_url=repo_link)

  with st.form("my_form"):
      st.write("Notice something wrong? Please tell us so we can improve.")
evaluations/models.py CHANGED
@@ -23,13 +23,10 @@ class LocalLLM():
          return res

  class RemoteLLM():
-     def __init__(self):
+     def __init__(self, model_name):
          token = os.getenv("hfToken")
-         API_URL = "https://api-inference.huggingface.co/models/openlm-research/open_llama_3b_v2"
-         headers = {"Authorization": f"Bearer {token}", "x-wait-for-model": "true"}
-
          self.client = InferenceClient(
-             "meta-llama/Llama-3.1-8B-Instruct",
+             model_name,
              token=token,
          )
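
Only __init__ is visible in this diff, so the rest of the class is not shown here. A minimal sketch of how the reworked RemoteLLM could look and be called, assuming a hypothetical predict helper built on InferenceClient.text_generation (the method name and generation parameters are guesses, not part of the commit):

# Sketch of RemoteLLM after this commit (not verbatim from the repo).
import os
from huggingface_hub import InferenceClient

class RemoteLLM():
    def __init__(self, model_name):
        # The Space is expected to provide a Hugging Face access token
        # through the "hfToken" secret / environment variable.
        token = os.getenv("hfToken")
        self.client = InferenceClient(
            model_name,
            token=token,
        )

    def predict(self, prompt):
        # Hypothetical helper (name assumed): forwards the prompt to the
        # hosted model via the Inference API and returns the generated text.
        return self.client.text_generation(prompt, max_new_tokens=256)

Compared with the removed code, the hard-coded API_URL and request headers are no longer needed, since InferenceClient handles authentication and model routing itself, and the model id is now passed in by the caller rather than fixed in the class.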