Commit: d04f0b2
Parent(s): f6c85ec
fix: method call for prediction to show only response message
app.py CHANGED

@@ -48,11 +48,7 @@ def initialize_session_state():
 def on_click_callback():
     with get_openai_callback() as cb:
         human_prompt = st.session_state.human_prompt
-        llm_response = st.session_state.conversation(
-            {
-                "question": human_prompt,
-            }
-        )
+        llm_response = st.session_state.conversation.run(human_prompt)
         st.session_state.history.append(Message("human", human_prompt))
         st.session_state.history.append(Message("ai", llm_response))
         st.session_state.token_count += cb.total_tokens
@@ -106,7 +102,7 @@ information_placeholder.caption(
     f"""
     Used {st.session_state.token_count} tokens \n
     Debug Langchain conversation:
-    {st.session_state.memory.buffer}
+    {st.session_state.conversation.memory.buffer}
     """
 )
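Why this one-line change yields only the response message: in the legacy LangChain API used here, calling a chain object directly (conversation({...})) returns a dict of inputs and outputs, whereas Chain.run(...) returns just the single output string, which is what gets appended to the chat history. The second hunk simply reads the debug buffer through the chain's own memory attribute instead of a separate st.session_state.memory entry. Below is a minimal sketch of that difference; it assumes a plain ConversationChain with ConversationBufferMemory and an OpenAI LLM as stand-ins, since the Space's actual chain (which previously took a "question" key) is not shown in this diff and may be a different chain class.

# Minimal sketch (legacy LangChain API); ConversationChain, OpenAI, and
# ConversationBufferMemory are illustrative stand-ins, not the Space's exact setup.
# Running this requires OPENAI_API_KEY to be set.
from langchain.chains import ConversationChain
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory

conversation = ConversationChain(
    llm=OpenAI(temperature=0),
    memory=ConversationBufferMemory(),
)

# Calling the chain directly returns a dict of inputs and outputs,
# e.g. {"input": "Hi", "history": "...", "response": "Hello! ..."}
full_output = conversation({"input": "Hi"})

# .run() returns only the output string, ready to append to the chat history.
response_only = conversation.run("Hi")

# The chain's conversation memory is reachable from the chain itself,
# which is what the updated debug caption reads.
print(conversation.memory.buffer)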