Update app.py
app.py CHANGED
```diff
@@ -777,34 +777,33 @@ def main():
     st.markdown(file_contents)
 
     buttonlabel = '🔍Run with Llama and GPT.'
-    if st.button(key='RunWithLlamaandGPT', label = buttonlabel)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if st.button(key='RunWithLlamaandGPT', label = buttonlabel):
+        user_prompt = file_contents
+
+        # Llama versus GPT Battle!
+        all=""
+        try:
+            st.write('🔍Running with Llama.')
+            response = StreamLLMChatResponse(file_contents)
+            filename = generate_filename(user_prompt, ".md")
+            create_file(filename, file_contents, response, should_save)
+            all=response
+            #SpeechSynthesis(response)
+        except:
+            st.markdown('Llama is sleeping. Restart ETA 30 seconds.')
+
+        # gpt
+        try:
+            st.write('🔍Running with GPT.')
+            response2 = chat_with_model(user_prompt, file_contents, model_choice)
+            filename2 = generate_filename(file_contents, choice)
+            create_file(filename2, user_prompt, response, should_save)
+            all=all+response2
+            #SpeechSynthesis(response2)
+        except:
+            st.markdown('GPT is sleeping. Restart ETA 30 seconds.')
+
+        SpeechSynthesis(all)
 
 
     if next_action=='search':
```
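In short, the previous revision's `if st.button(...)` line had no trailing colon and no body; this commit completes it so the button runs the loaded file contents through Llama and then GPT, saves each reply to a generated filename, concatenates the replies, and passes the result to `SpeechSynthesis`. Below is a minimal sketch of that flow, not the committed code: it assumes the helpers referenced in the hunk (`StreamLLMChatResponse`, `chat_with_model`, `generate_filename`, `create_file`, `SpeechSynthesis`) and the variables `file_contents`, `model_choice`, `choice`, and `should_save` are defined elsewhere in app.py. It also renames `all` to `combined` to avoid shadowing the built-in, narrows the bare `except:` clauses, and saves `response2` rather than `response` for the GPT reply, which looks like a typo in the committed version.

```python
import streamlit as st

# Minimal sketch of the button handler introduced in this commit. The helpers
# (StreamLLMChatResponse, chat_with_model, generate_filename, create_file,
# SpeechSynthesis) and the variables file_contents, model_choice, choice, and
# should_save are assumed to be defined elsewhere in app.py.
if st.button(key='RunWithLlamaandGPT', label='🔍Run with Llama and GPT.'):
    user_prompt = file_contents
    combined = ""  # renamed from `all` so the built-in is not shadowed

    # Llama pass: stream a reply and save it to a generated .md file.
    try:
        st.write('🔍Running with Llama.')
        response = StreamLLMChatResponse(file_contents)
        filename = generate_filename(user_prompt, ".md")
        create_file(filename, file_contents, response, should_save)
        combined = response
    except Exception:  # narrowed from the bare `except:` in the commit
        st.markdown('Llama is sleeping. Restart ETA 30 seconds.')

    # GPT pass: query the selected model and save that reply as well.
    try:
        st.write('🔍Running with GPT.')
        response2 = chat_with_model(user_prompt, file_contents, model_choice)
        filename2 = generate_filename(file_contents, choice)
        create_file(filename2, user_prompt, response2, should_save)  # response2, not response
        combined = combined + response2
    except Exception:
        st.markdown('GPT is sleeping. Restart ETA 30 seconds.')

    # Speak the concatenation of whichever replies succeeded.
    SpeechSynthesis(combined)
```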