pratyushmaini committed
Commit cd64317 · 1 Parent(s): 2931efa

Update app.py

Files changed (1)
  1. app.py +20 -15
app.py CHANGED
@@ -9,21 +9,23 @@ model_list = {
 }
 
 def respond(message, history, system_message, max_tokens, temperature, top_p, selected_model):
-    # Look up the model ID from our list based on the dropdown selection
-    model_id = model_list.get(selected_model, "HuggingFaceH4/zephyr-7b-beta")
-    # Create an InferenceClient for the selected model
-    client = InferenceClient(model_id)
+    # Ensure history is a list
+    history = history or []
 
-    # Build the conversation history into the message list
+    # Build conversation messages for the client
     messages = [{"role": "system", "content": system_message}]
-    for user_msg, assistant_msg in history or []:
-        if user_msg:
-            messages.append({"role": "user", "content": user_msg})
-        if assistant_msg:
-            messages.append({"role": "assistant", "content": assistant_msg})
+    for user_msg, assistant_msg in history:
+        messages.append({"role": "user", "content": user_msg})
+        messages.append({"role": "assistant", "content": assistant_msg})
     messages.append({"role": "user", "content": message})
 
+    # Append new user message to history with an empty assistant response
+    history = history + [(message, "")]
     response = ""
+
+    # Create an InferenceClient for the selected model
+    client = InferenceClient(model_list.get(selected_model, "HuggingFaceH4/zephyr-7b-beta"))
+
     # Stream the response from the client
     for token_message in client.chat_completion(
         messages,
@@ -34,9 +36,12 @@ def respond(message, history, system_message, max_tokens, temperature, top_p, se
     ):
         token = token_message.choices[0].delta.content
         response += token
-        yield response
+        # Update the assistant's message in the history
+        history[-1] = (message, response)
+        # Yield two outputs: clear the input and update the chat history
+        yield "", history
 
-# CSS styling: pastel backgrounds, gentle light colors, and rounded corners for a safe vibe
+# Custom CSS for pastel colors, gentle backgrounds, and rounded corners
 css = """
 body { background-color: #FAF3E0; }
 .gradio-container { background-color: #FFFFFF; border-radius: 16px; padding: 20px; }
@@ -57,7 +62,7 @@ with gr.Blocks(css=css) as demo:
         # Main area: Chat interface and settings
         with gr.Column(scale=3):
             gr.Markdown("## Chat Interface")
-            chatbot = gr.Chatbot(label="Chat with your Model")
+            chatbot = gr.Chatbot(label="Chat with your Model", type="messages")
            user_input = gr.Textbox(placeholder="Enter your message...", label="Your Message")
             with gr.Row():
                 send_button = gr.Button("Send")
@@ -68,14 +73,14 @@
             temperature_slider = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
             top_p_slider = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
 
-    # When "Send" is clicked, run the respond() function and update the chat interface.
+    # Wire the Send button to call the respond() function and update outputs
     send_button.click(
         fn=respond,
         inputs=[user_input, chatbot, system_message, max_tokens_slider, temperature_slider, top_p_slider, model_dropdown],
         outputs=[user_input, chatbot],
     )
 
-    # Clear the chat history when "Clear Chat" is clicked.
+    # Clear the chat history when Clear Chat is clicked
     clear_button.click(lambda: None, None, chatbot, queue=False)
 
 if __name__ == "__main__":
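
For reference, the pattern the patched respond() relies on can be reduced to a short, self-contained sketch: a generator event handler that yields two values per streamed token, so Gradio clears the input Textbox and refreshes the Chatbot on every step. This is an illustration under stated assumptions, not the app's exact code: it drops the model dropdown, sliders, and CSS, keeps the history in the role/content dict format that gr.Chatbot(type="messages") expects, and uses a placeholder system prompt and max_tokens value; the model ID is the app's fallback, and access to the Hugging Face Inference API (plus recent gradio and huggingface_hub versions) is assumed.

import gradio as gr
from huggingface_hub import InferenceClient

MODEL_ID = "HuggingFaceH4/zephyr-7b-beta"  # the app's fallback model, used here as a stand-in


def respond(message, history):
    # With type="messages", Gradio passes the history as role/content dicts
    history = history or []
    messages = [{"role": "system", "content": "You are a friendly assistant."}]  # placeholder prompt
    for turn in history:
        messages.append({"role": turn["role"], "content": turn["content"]})
    messages.append({"role": "user", "content": message})

    # Show the user turn immediately and stream the assistant turn into place
    history = history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": ""},
    ]
    response = ""
    client = InferenceClient(MODEL_ID)
    for chunk in client.chat_completion(messages, max_tokens=256, stream=True):
        response += chunk.choices[0].delta.content or ""
        history[-1] = {"role": "assistant", "content": response}
        # Two outputs per yield: clear the Textbox, refresh the Chatbot
        yield "", history


with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Chat with your Model", type="messages")
    user_input = gr.Textbox(placeholder="Enter your message...", label="Your Message")
    send_button = gr.Button("Send")
    send_button.click(fn=respond, inputs=[user_input, chatbot], outputs=[user_input, chatbot])

if __name__ == "__main__":
    demo.launch()

The same generator can also be exercised outside Gradio by iterating over respond("Hello", []) and inspecting the ("", history) pairs it yields, which is a convenient way to check the streaming logic without launching the UI.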