alyshacreelman committed (verified)
Commit a5749b0 · Parent(s): 2f74835

Update app.py

Files changed (1)
  1. app.py +20 -15
app.py CHANGED
@@ -10,6 +10,7 @@ pipe = pipeline("text-generation", "microsoft/Phi-3-mini-4k-instruct", torch_dty
 # Global flag to handle cancellation
 stop_inference = False
 
+
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -27,7 +28,7 @@ def respond(
         history = []
 
     if use_local_model:
-        # Local inference
+        # local inference
         messages = [{"role": "system", "content": system_message}]
         for val in history:
             if val[0]:
@@ -53,7 +54,7 @@ def respond(
             yield history + [(message, response)] # Yield history + new response
 
     else:
-        # API-based inference
+        # API-based inference
         messages = [{"role": "system", "content": system_message}]
         for val in history:
             if val[0]:
@@ -81,6 +82,7 @@ def respond(
             response += token
             yield history + [(message, response)] # Yield history + new response
 
+
 def cancel_inference():
     global stop_inference
     stop_inference = True
@@ -91,7 +93,6 @@ custom_css = """
     background: #cdebc5;
     font-family: 'Comic Neue', sans-serif;
 }
-
 .gradio-container {
     max-width: 700px;
     margin: 0 auto;
@@ -100,7 +101,6 @@ custom_css = """
     box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
     border-radius: 10px;
 }
-
 .gr-button {
     background-color: #a7e0fd;
     color: light blue;
@@ -110,19 +110,15 @@ custom_css = """
     cursor: pointer;
     transition: background-color 0.3s ease;
 }
-
 .gr-button:hover {
     background-color: #45a049;
 }
-
 .gr-slider input {
     color: #4CAF50;
 }
-
 .gr-chat {
     font-size: 16px;
 }
-
 #title {
     text-align: center;
     font-size: 2em;
@@ -131,22 +127,29 @@ custom_css = """
 }
 """
 
+
 # Define the interface
 with gr.Blocks(css=custom_css) as demo:
     gr.Markdown("<h2 style='text-align: center;'>🍎✏️ School AI Chatbot ✏️🍎</h2>")
     gr.Markdown("<h1 style='text-align: center;'>🐛</h1>")
     gr.Markdown("Interact with Wormington Scholar 🐛 by selecting the appropriate level below.")
 
-    with gr.Row():
-        system_message = gr.State(value="You are a friendly Chatbot.")
-        gr.Button("Elementary School").click(lambda: system_message.update("You are an elementary school teacher. Please respond with the vocabulary of the seven year old."))
-        gr.Button("Middle School").click(lambda: system_message.update("You are a middle school teacher. Please respond at a level that middle schoolers can understand"))
-        gr.Button("High School").click(lambda: system_message.update("You are a high school teacher. Please respond at a level that a high school student can understand."))
-        gr.Button("College").click(lambda: system_message.update("You are a college Professor. Please respond with very advanced, college-level vocabulary."))
 
+
     with gr.Row():
+        system_message = gr.Dropdown(
+            choices=["You are a friendly Chatbot that responds with the vocabulary of the seven year old.",
+                     "You are a friendly Chatbot. Please respond at a level that middle schoolers can understand",
+                     "You are a friendly high school Chatbot who responds at a level the average person can understand.",
+                     "You are a friendly Chatbot that uses a very advanced, college-level vocabulary in your responses."],
+            label="System message",
+            interactive=True
+        )
+
+    with gr.Row():
         use_local_model = gr.Checkbox(label="Use Local Model", value=False)
 
+
     with gr.Row():
        max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
        temperature = gr.Slider(minimum=0.5, maximum=4.0, value=1.2, step=0.1, label="Temperature")
@@ -163,5 +166,7 @@ with gr.Blocks(css=custom_css) as demo:
 
     cancel_button.click(cancel_inference)
 
+
+
 if __name__ == "__main__":
-    demo.launch(share=False) # Remove share=True because it's not supported on HF Spaces
+    demo.launch(share=False) # Remove share=True because it's not supported on HF Spaces
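
For reference, a minimal standalone sketch of how a Dropdown like the new system_message can feed a respond() callback through an event's inputs list. Only the Dropdown arguments and the tuple-style history mirror app.py; the component names msg and chat, the shortened choices list, and the echo reply are illustrative assumptions, not code from this repository.

import gradio as gr

def respond(message, history, system_message):
    # The selected Dropdown choice arrives as a plain string, ready to be used
    # as the system prompt; the reply here is a placeholder echo, not the
    # pipeline/API call from app.py.
    reply = f"({system_message}) You said: {message}"
    return history + [(message, reply)]

with gr.Blocks() as demo:
    system_message = gr.Dropdown(
        choices=["You are a friendly Chatbot that responds with the vocabulary of the seven year old.",
                 "You are a friendly Chatbot. Please respond at a level that middle schoolers can understand"],
        label="System message",
        interactive=True,
    )
    chat = gr.Chatbot()                # tuple-style history, matching respond()'s signature
    msg = gr.Textbox(label="Message")  # hypothetical input box for this sketch

    # Listing the Dropdown in `inputs` hands its current value to respond() on
    # every submit, so no extra button handlers or gr.State bookkeeping are needed.
    msg.submit(respond, inputs=[msg, chat, system_message], outputs=chat)

if __name__ == "__main__":
    demo.launch(share=False)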