Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -456,32 +456,76 @@ def display_terms_with_links(terms):
|
|
| 456 |
links_md = ' '.join([f"[{emoji}]({url(term)})" for emoji, url in search_urls.items()])
|
| 457 |
st.markdown(f"- **{term}** {links_md}", unsafe_allow_html=True)
|
| 458 |
|
| 459 |
-
def
|
| 460 |
-
all=""
|
| 461 |
-
st.markdown(f"- {query}")
|
| 462 |
|
| 463 |
-
|
| 464 |
client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
|
| 465 |
-
response2 = client.predict(
|
| 466 |
-
query, # str in 'parameter_13' Textbox component
|
| 467 |
-
#"mistralai/Mixtral-8x7B-Instruct-v0.1", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
|
| 468 |
-
#"mistralai/Mistral-7B-Instruct-v0.2", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
|
| 469 |
-
"google/gemma-7b-it", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
|
| 470 |
-
True, # bool in 'Stream output' Checkbox component
|
| 471 |
-
api_name="/ask_llm"
|
| 472 |
-
)
|
| 473 |
-
st.write('🔍Run of Multi-Agent System Paper Summary Spec is Complete')
|
| 474 |
-
st.markdown(response2)
|
| 475 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 476 |
client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
|
| 477 |
-
|
| 478 |
-
|
| 479 |
-
|
| 480 |
-
|
| 481 |
-
|
| 482 |
-
|
| 483 |
)
|
| 484 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 485 |
|
| 486 |
def display_file_content(file_path):
|
| 487 |
"""Display file content with editing capabilities."""
|
|
|
|
| 456 |
links_md = ' '.join([f"[{emoji}]({url(term)})" for emoji, url in search_urls.items()])
|
| 457 |
st.markdown(f"- **{term}** {links_md}", unsafe_allow_html=True)
|
| 458 |
|
| 459 |
+
def search_arxiv(query):
    """Ask the Arxiv RAG Space the same question with two LLMs and show both answers.

    Sends *query* to the remote Space's '/ask_llm' endpoint twice — once with
    Mixtral-8x7B-Instruct-v0.1 and once with Mistral-7B-Instruct-v0.2 —
    renders each answer on the Streamlit page under its own heading, and
    returns the two answers joined by a blank line.

    Args:
        query: Free-text question, forwarded verbatim to the remote Space.

    Returns:
        str: ``result1`` and ``result2`` separated by a blank line.
    """
    st.write("Performing AI Lookup...")
    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

    # First model: Mixtral-8x7B.
    result1 = client.predict(
        prompt=query,
        llm_model_picked="mistralai/Mixtral-8x7B-Instruct-v0.1",
        stream_outputs=True,
        api_name="/ask_llm"
    )
    st.markdown("### Mixtral-8x7B-Instruct-v0.1 Result")
    st.markdown(result1)

    # Second model: Mistral-7B, same endpoint and query.
    result2 = client.predict(
        prompt=query,
        llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
        stream_outputs=True,
        api_name="/ask_llm"
    )
    st.markdown("### Mistral-7B-Instruct-v0.2 Result")
    st.markdown(result2)

    combined_result = f"{result1}\n\n{result2}"
    # BUG FIX: the previous version returned the undefined name `responseall`
    # (its intended `return combined_result` was commented out), raising
    # NameError on every call. Return the value actually built above.
    return combined_result
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
def perform_ai_lookup(query):
    """Run the two-step Arxiv RAG lookup for *query* and render the outcome.

    Step 1 calls the Space's '/update_with_rag_md' endpoint to retrieve paper
    references for the query. Step 2 calls '/ask_llm' for a summary answer
    with those papers as context; when the answer is non-trivial it is spoken
    via SpeechSynthesis and shown as a combined Question / Answer / References
    markdown block. Wall-clock timing is reported, and the combined markdown
    is written to a generated ``.md`` file.

    Args:
        query: Free-text search question.

    Returns:
        str: The combined markdown, or ``''`` when the second-step answer
        was too short to use.
    """
    started_at = time.strftime("%Y-%m-%d %H:%M:%S")
    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

    # Search 1 — retrieve the paper references as markdown.
    rag_response = client.predict(
        query,
        20,
        "Semantic Search - up to 10 Mar 2024",
        "mistralai/Mixtral-8x7B-Instruct-v0.1",
        api_name="/update_with_rag_md"
    )
    question_md = '### 🔎 ' + query + '\r\n'  # markdown header for the query
    references = rag_response[0]
    # NOTE(review): assumes extract_urls returns a value concatenable with
    # str below — confirm against its definition elsewhere in this file.
    reference_links = extract_urls(references)

    run_summary_step = True
    results = ''
    if run_summary_step:
        # Search 2 — summary answer with the papers' context and the query.
        answer_response = client.predict(
            query,
            "mistralai/Mixtral-8x7B-Instruct-v0.1",
            True,
            api_name="/ask_llm"
        )
        if len(answer_response) > 10:
            answer = answer_response
            SpeechSynthesis(answer)
            # Assemble in the order: Question, Answer, References, ReferenceLinks.
            results = question_md + '\r\n' + answer + '\r\n' + references + '\r\n' + reference_links
            st.markdown(results)

    st.write('🔍Run of Multi-Agent System Paper Summary Spec is Complete')
    finished_at = time.strftime("%Y-%m-%d %H:%M:%S")
    # Round-trip through strptime/mktime so elapsed time comes straight
    # from the displayed (second-resolution) timestamps.
    start_ts = time.mktime(time.strptime(started_at, "%Y-%m-%d %H:%M:%S"))
    end_ts = time.mktime(time.strptime(finished_at, "%Y-%m-%d %H:%M:%S"))
    elapsed = end_ts - start_ts
    st.write(f"Start time: {started_at}")
    st.write(f"Finish time: {finished_at}")
    st.write(f"Elapsed time: {elapsed:.2f} seconds")

    # Persist the combined markdown; should_save is a module-level flag.
    out_file = generate_filename(query, "md")
    create_file(out_file, query, results, should_save)
    return results
|
| 529 |
|
| 530 |
def display_file_content(file_path):
|
| 531 |
"""Display file content with editing capabilities."""
|