Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -27,7 +27,6 @@ client = Groq(api_key=groq_api_key)
|
|
| 27 |
|
| 28 |
hf_token = hf_api_key
|
| 29 |
|
| 30 |
-
memory = ConversationBufferMemory()
|
| 31 |
embeddings = HuggingFaceEmbeddings(model_name="heydariAI/persian-embeddings")
|
| 32 |
vector_store = InMemoryVectorStore(embeddings)
|
| 33 |
|
|
@@ -35,7 +34,6 @@ DATASET_NAME = "chat_history"
|
|
| 35 |
# Load the persisted chat-history dataset from the Hugging Face Hub;
# fall back to an empty in-memory dataset when it cannot be fetched.
try:
    dataset = load_dataset(DATASET_NAME, use_auth_token=hf_token)
except Exception:
    # NOTE(review): broad except — any failure (missing repo, bad token,
    # network error) silently starts a fresh, empty history. Also,
    # `use_auth_token` is deprecated in newer `datasets` releases in favor
    # of `token=` — verify against the installed version.
    dataset = Dataset.from_dict({"Timestamp": [], "User": [], "ParvizGPT": []})
|
| 40 |
|
| 41 |
def save_chat_to_dataset(user_message, bot_message):
|
|
@@ -66,7 +64,7 @@ def process_pdf_with_langchain(pdf_path):
|
|
| 66 |
logger.error(f"Error processing PDF: {e}")
|
| 67 |
raise
|
| 68 |
|
| 69 |
-
def generate_response(query, retriever=None, use_pdf_context=False):
|
| 70 |
try:
|
| 71 |
knowledge = ""
|
| 72 |
|
|
@@ -105,33 +103,33 @@ def generate_response(query, retriever=None, use_pdf_context=False):
|
|
| 105 |
logger.error(f"Attempt {attempt + 1} failed: {e}")
|
| 106 |
time.sleep(2)
|
| 107 |
|
| 108 |
-
return response
|
| 109 |
except Exception as e:
|
| 110 |
logger.error(f"Error generating response: {e}")
|
| 111 |
-
return f"Error: {e}"
|
| 112 |
|
| 113 |
-
def gradio_interface(user_message, chat_box, pdf_file=None, use_pdf_context=False):
    """Gradio event handler: answer `user_message` and update the chat.

    Args:
        user_message: text the user typed.
        chat_box: chat history as a list of (speaker, text) tuples.
        pdf_file: optional uploaded PDF used as retrieval context.
        use_pdf_context: when True (and a PDF is given), rebuild the retriever.

    Returns:
        The updated chat_box list, which the UI re-renders.
    """
    global retriever  # module-level; survives across calls once a PDF is indexed
    if pdf_file is not None and use_pdf_context:
        try:
            retriever = process_pdf_with_langchain(pdf_file.name)
        except Exception as e:
            # Surface the PDF failure in the chat instead of raising.
            return chat_box + [("Error", f"Error processing PDF: {e}")]

    # Persian for "processing..." — placeholder while the model is queried.
    chat_box.append(("ParvizGPT", "در حال پردازش..."))

    response = generate_response(user_message, retriever=retriever, use_pdf_context=use_pdf_context)

    # NOTE(review): the placeholder appended above is overwritten before the
    # UI can refresh (this handler is not a generator), so it is never shown;
    # the net effect is appending the (user, bot) pair — confirm intent.
    chat_box[-1] = ("You", user_message)
    chat_box.append(("ParvizGPT", response))

    save_chat_to_dataset(user_message, response)

    return chat_box
|
| 131 |
|
| 132 |
-
def clear_memory():
    """Reset the conversation.

    Clears the module-level conversation memory and returns an empty list
    so the bound Gradio output blanks the chat display.
    """
    memory.clear()  # module-level ConversationBufferMemory
    return []
|
| 135 |
|
| 136 |
retriever = None
|
| 137 |
|
|
@@ -143,8 +141,11 @@ with gr.Blocks() as interface:
|
|
| 143 |
clear_memory_btn = gr.Button("Clear Memory", interactive=True)
|
| 144 |
pdf_file = gr.File(label="Upload PDF for Context (Optional)", type="filepath", interactive=True, scale=1)
|
| 145 |
submit_btn = gr.Button("Submit")
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
|
|
|
|
|
|
|
|
|
| 149 |
|
| 150 |
interface.launch()
|
|
|
|
| 27 |
|
| 28 |
hf_token = hf_api_key
|
| 29 |
|
|
|
|
| 30 |
embeddings = HuggingFaceEmbeddings(model_name="heydariAI/persian-embeddings")
|
| 31 |
vector_store = InMemoryVectorStore(embeddings)
|
| 32 |
|
|
|
|
| 34 |
# Fetch the saved chat-history dataset; if unavailable, start with an
# empty one that has the same three-column schema used by
# save_chat_to_dataset.
try:
    dataset = load_dataset(DATASET_NAME, use_auth_token=hf_token)
except Exception:
    # NOTE(review): catches everything, so auth/network problems are
    # indistinguishable from "dataset does not exist yet" — consider
    # logging the exception. `use_auth_token` is deprecated in recent
    # `datasets` versions (use `token=`) — verify installed version.
    dataset = Dataset.from_dict({"Timestamp": [], "User": [], "ParvizGPT": []})
|
| 38 |
|
| 39 |
def save_chat_to_dataset(user_message, bot_message):
|
|
|
|
| 64 |
logger.error(f"Error processing PDF: {e}")
|
| 65 |
raise
|
| 66 |
|
| 67 |
+
def generate_response(query, memory, retriever=None, use_pdf_context=False):
|
| 68 |
try:
|
| 69 |
knowledge = ""
|
| 70 |
|
|
|
|
| 103 |
logger.error(f"Attempt {attempt + 1} failed: {e}")
|
| 104 |
time.sleep(2)
|
| 105 |
|
| 106 |
+
return response, memory
|
| 107 |
except Exception as e:
|
| 108 |
logger.error(f"Error generating response: {e}")
|
| 109 |
+
return f"Error: {e}", memory
|
| 110 |
|
| 111 |
+
def gradio_interface(user_message, chat_box, memory, pdf_file=None, use_pdf_context=False):
    """Gradio event handler: answer `user_message`, threading session memory.

    Args:
        user_message: text the user typed.
        chat_box: chat history as a list of (speaker, text) tuples.
        memory: per-session conversation memory object (held in gr.State).
        pdf_file: optional uploaded PDF used as retrieval context.
        use_pdf_context: when True (and a PDF is given), rebuild the retriever.

    Returns:
        (updated chat_box, updated memory) — matches the two-output wiring.
    """
    global retriever  # module-level; survives across calls once a PDF is indexed
    if pdf_file is not None and use_pdf_context:
        try:
            retriever = process_pdf_with_langchain(pdf_file.name)
        except Exception as e:
            # Surface the PDF failure in the chat; memory is returned unchanged.
            return chat_box + [("Error", f"Error processing PDF: {e}")], memory

    # Persian for "processing..." — placeholder while the model is queried.
    chat_box.append(("ParvizGPT", "در حال پردازش..."))

    response, memory = generate_response(user_message, memory, retriever=retriever, use_pdf_context=use_pdf_context)

    # NOTE(review): the placeholder appended above is overwritten before the
    # UI can refresh (this handler is not a generator), so it is never shown;
    # the net effect is appending the (user, bot) pair — confirm intent.
    chat_box[-1] = ("You", user_message)
    chat_box.append(("ParvizGPT", response))

    save_chat_to_dataset(user_message, response)

    return chat_box, memory
|
| 129 |
|
| 130 |
+
def clear_memory(memory):
    """Wipe the session's conversation memory and blank the chat display.

    Args:
        memory: the session memory object (from gr.State); only its
            ``clear()`` method is used.

    Returns:
        A (history, memory) pair: an empty chat history plus the cleared
        memory, matching the ``outputs=[chat_box, memory_state]`` wiring.
    """
    memory.clear()
    empty_history = []
    return empty_history, memory
|
| 133 |
|
| 134 |
retriever = None
|
| 135 |
|
|
|
|
| 141 |
clear_memory_btn = gr.Button("Clear Memory", interactive=True)
|
| 142 |
pdf_file = gr.File(label="Upload PDF for Context (Optional)", type="filepath", interactive=True, scale=1)
|
| 143 |
submit_btn = gr.Button("Submit")
|
| 144 |
+
|
| 145 |
+
memory_state = gr.State(ConversationBufferMemory())  # per-session conversation memory

# The Submit button and pressing Enter in the textbox run the same handler;
# both return (chat history, memory) so the State stays in sync with the UI.
submit_btn.click(gradio_interface, inputs=[user_message, chat_box, memory_state, pdf_file, use_pdf_context], outputs=[chat_box, memory_state])
user_message.submit(gradio_interface, inputs=[user_message, chat_box, memory_state, pdf_file, use_pdf_context], outputs=[chat_box, memory_state])
# clear_memory returns ([], cleared memory), blanking the chat display.
clear_memory_btn.click(clear_memory, inputs=[memory_state], outputs=[chat_box, memory_state])
|
| 150 |
|
| 151 |
interface.launch()
|