Update app.py
app.py
CHANGED
@@ -26,8 +26,8 @@ Key = os.environ.get("Key")  # Don't forget your key!
 LOCAL_APP_URL = "https://huggingface.co/spaces/awacke1/AzureCosmosDBUI"

 # OpenAI configuration
-#openai.api_key = os.environ.get("OPENAI_API_KEY")
-#MODEL = "gpt-3.5-turbo"  # Replace with your desired model

 # GitHub configuration
 def download_github_repo(url, local_path):
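This hunk only reformats the two commented-out OpenAI settings. If that path were ever re-enabled, the configuration would look roughly like the sketch below, assuming the pre-1.0 openai package that the (also removed) openai.ChatCompletion call further down relies on; the environment variable name is taken from the commented line, nothing here is verified against the Space's secrets.

import os
import openai  # pre-1.0 SDK style, matching the openai.ChatCompletion usage removed later in this diff

openai.api_key = os.environ.get("OPENAI_API_KEY")  # read from the Space's secrets/environment
MODEL = "gpt-3.5-turbo"  # any chat-capable model name works here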
@@ -95,6 +95,7 @@ def get_documents(container, limit=None):

 # Cosmos DB functions
 def insert_record(container, record):
     try:
         container.create_item(body=record)
         return True, "Record inserted successfully!"
@@ -104,6 +105,7 @@ def insert_record(container, record):
         return False, f"An unexpected error occurred: {str(e)}"

 def update_record(container, updated_record):
     try:
         container.upsert_item(body=updated_record)
         return True, f"Record with id {updated_record['id']} successfully updated."
@@ -113,6 +115,7 @@ def update_record(container, updated_record):
         return False, f"An unexpected error occurred: {traceback.format_exc()}"

 def delete_record(container, name, id):
     try:
         container.delete_item(item=id, partition_key=id)
         return True, f"Successfully deleted record with name: {name} and id: {id}"
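For orientation, a minimal sketch of how these CRUD helpers get exercised, assuming the CosmosClient wiring app.py does elsewhere from its environment values; the endpoint variable and the database/container names below are placeholders, only "Key" actually appears in this diff.

import os
from azure.cosmos import CosmosClient

client = CosmosClient(os.environ["ENDPOINT"], credential=os.environ["Key"])  # "ENDPOINT" is an assumed name
container = client.get_database_client("mydb").get_container_client("mycontainer")  # placeholder names

ok, msg = insert_record(container, {"id": "123", "name": "demo"})
ok, msg = update_record(container, {"id": "123", "name": "demo", "status": "edited"})
ok, msg = delete_record(container, "demo", "123")  # delete_record passes the id as partition key, so this assumes the container is partitioned on /id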
@@ -130,6 +133,7 @@ def generate_unique_id():

 # Function to archive current container
 def archive_current_container(database_name, container_name, client):
     try:
         base_dir = "./cosmos_archive_current_container"
         if os.path.exists(base_dir):
@@ -156,8 +160,9 @@ def archive_current_container(database_name, container_name, client):
         return f"An error occurred while archiving data: {str(e)}"


-# Helper to extract hyperlinks
 def extract_hyperlinks(responses):
     hyperlinks = []
     for response in responses:
         parsed_response = json.loads(response)
@@ -165,17 +170,20 @@ def extract_hyperlinks(responses):
         hyperlinks.extend(links)
     return hyperlinks

-# Helper to format text with line numbers
 def format_with_line_numbers(text):
     lines = text.splitlines()
     formatted_text = '\n'.join(f"{i+1}: {line}" for i, line in enumerate(lines))
     return formatted_text

-# Save responses to Cosmos DB
 def save_to_cosmos_db(query, response1, response2):
     cosmos_container = st.session_state.get("cosmos_container")
     if cosmos_container:
         record = {
             "query": query,
             "response1": json.loads(response1),
             "response2": json.loads(response2)
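The two helpers above are easiest to read from a tiny worked example; values are illustrative, and the "links" key is implied by the surrounding code rather than shown in this hunk.

print(format_with_line_numbers("alpha\nbeta"))
# 1: alpha
# 2: beta

extract_hyperlinks(['{"links": ["https://arxiv.org/abs/2305.00001"]}'])
# -> ['https://arxiv.org/abs/2305.00001']   (each response is expected to be a JSON string)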
@@ -188,9 +196,9 @@ def save_to_cosmos_db(query, response1, response2):
     else:
         st.error("Cosmos DB is not initialized.")

-
-# Add dropdowns for model and database choices
 def search_glossary(query):
     st.markdown(f"### Search Glossary for: `{query}`")

     # Dropdown for model selection
@@ -201,86 +209,16 @@ def search_glossary(query):
     database_options = ['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)']
     database_choice = st.selectbox('Select Database', options=database_options, index=0)

-
-
     # Run Button with Emoji
     if st.button("Run"):
-
-        # Searching the glossary for: query
-        all_results = ""
-        st.markdown(f"- {query}")
-
-        # ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
-        #database_choice Literal['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)'] Default: "Semantic Search"
-        #llm_model_picked Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] Default: "mistralai/Mistral-7B-Instruct-v0.2"
         client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
-
-
-        # ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /ask_llm
-        result = client.predict(
-            prompt=query,
-            llm_model_picked="mistralai/Mixtral-8x7B-Instruct-v0.1",
-            stream_outputs=True,
-            api_name="/ask_llm"
-        )
-        st.markdown(result)
-        st.code(result, language="python", line_numbers=True)
-
-        # ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /ask_llm
-        result2 = client.predict(
-            prompt=query,
-            llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
-            stream_outputs=True,
-            api_name="/ask_llm"
-        )
-        st.markdown(result2)
-        st.code(result2, language="python", line_numbers=True)
-
-        # ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /ask_llm
-        result3 = client.predict(
-            prompt=query,
-            llm_model_picked="google/gemma-7b-it",
-            stream_outputs=True,
-            api_name="/ask_llm"
-        )
-        st.markdown(result3)
-        st.code(result3, language="python", line_numbers=True)
-
-
-
-        # ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /update_with_rag_md
-        response2 = client.predict(
-            message=query,  # str in 'parameter_13' Textbox component
-            llm_results_use=10,
-            database_choice="Semantic Search",
-            llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
-            api_name="/update_with_rag_md"
-        )  # update_with_rag_md returns a tuple of 2 elements: [0] str, the output value shown in the "value_14" Markdown component; [1] str
-
-        st.markdown(response2[0])
-        st.code(response2[0], language="python", line_numbers=True, wrap_lines=True)
-
-        st.markdown(response2[1])
-        st.code(response2[1], language="python", line_numbers=True, wrap_lines=True)
-

-        #
-        hyperlinks = extract_hyperlinks([response1, response2])
-        st.markdown("### Aggregated Hyperlinks")
-        for link in hyperlinks:
-            st.markdown(f"[{link}]({link})")

-
-        st.markdown("### Response Outputs with Line Numbers")
-        st.code(f"Response 1: \n{format_with_line_numbers(response1)}\n\nResponse 2: \n{format_with_line_numbers(response2)}", language="json")
-
-        # Save both responses to Cosmos DB
-        save_to_cosmos_db(query, response2, result)
-
-
-
-# Function to process text input
 def process_text(text_input):
     if text_input:
         if 'messages' not in st.session_state:
             st.session_state.messages = []
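The block deleted here was the multi-model fan-out inside the Run button. Stripped of the Streamlit display calls, the underlying pattern is just gradio_client against the awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern Space; below is a standalone sketch of the same two endpoints, with parameter names copied from the deleted code (the tuple element names on the second call are mine).

from gradio_client import Client

client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

# /ask_llm: free-form question against one of the hosted models
answer = client.predict(
    prompt="What is retrieval-augmented generation?",
    llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
    stream_outputs=True,
    api_name="/ask_llm",
)

# /update_with_rag_md: semantic search over ArXiv plus a summary; returns a 2-tuple of strings
summary_md, refs_md = client.predict(
    message="What is retrieval-augmented generation?",
    llm_results_use=10,
    database_choice="Semantic Search",
    llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
    api_name="/update_with_rag_md",
)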
@@ -291,29 +229,9 @@ def process_text(text_input):
             st.markdown(text_input)

         with st.chat_message("assistant"):
-
             search_glossary(text_input)

-
-            useOpenAI=False
-            if useOpenAI:
-                completion = openai.ChatCompletion.create(
-                    model=MODEL,
-                    messages=[
-                        {"role": m["role"], "content": m["content"]}
-                        for m in st.session_state.messages
-                    ],
-                    stream=False
-                )
-                return_text = completion.choices[0].message.content
-                st.write("Assistant: " + return_text)
-
-
-                filename = generate_filename(text_input, "md")
-                create_and_save_file(return_text, file_type="md", prompt=text_input, is_image=False, should_save=True)
-                st.session_state.messages.append({"role": "assistant", "content": return_text})
-
-# Function to generate a filename
 def generate_filename(text, file_type):
     # Generate a filename based on the text input
     safe_text = "".join(c if c.isalnum() or c in (' ', '.', '_') else '_' for c in text)
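The deleted useOpenAI branch was already dead code, since its MODEL and openai.api_key are commented out in the first hunk. For reference, the chat history it consumed is the plain list of role/content dicts that process_text accumulates in st.session_state.messages; the contents below are made up.

messages = [
    {"role": "user", "content": "What is RAG?"},          # appended when the user submits input
    {"role": "assistant", "content": "...model reply"},   # appended after a completion came back
]
# The removed branch forwarded this list unchanged to openai.ChatCompletion.create(model=MODEL, messages=messages).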
@@ -321,9 +239,9 @@ def generate_filename(text, file_type):
     filename = f"{safe_text}.{file_type}"
     return filename

-#
 def extract_markdown_title(content):
-    #
     lines = content.splitlines()
     for line in lines:
         if line.startswith('#'):
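A quick illustration of the two filename helpers, ignoring the one line of generate_filename that this hunk does not show (which may truncate or timestamp the name):

generate_filename("What is RAG?", "md")
# -> "What is RAG_.md"   (every char except alphanumerics, space, dot and underscore becomes "_")

extract_markdown_title("# Cosmic Notes\nbody text")
# -> presumably "Cosmic Notes"; only the scan for a line starting with '#' is visible in this diff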
@@ -332,17 +250,14 @@ def extract_markdown_title(content):

 # Function to create and save a file
 def create_and_save_file(content, file_type="md", prompt=None, is_image=False, should_save=True):
-    """
-    Combines file name generation and file creation into one function.
-    If the file is a markdown file, extracts the title from the content (if available) and uses it for the filename.
-    """
     if not should_save:
         return None

     # Step 1: Generate filename based on the prompt or content
     filename = generate_filename(prompt if prompt else content, file_type)

-    # Step 2: If it's a markdown file, check if it has a title
     if file_type == "md":
         title_from_content = extract_markdown_title(content)
         if title_from_content:
@@ -357,8 +272,30 @@ def create_and_save_file(content, file_type="md", prompt=None, is_image=False, should_save=True):

     return filename

-#
 def main():
     st.title("Git Cosmos - Azure Cosmos DB and Github Agent")

     # Initialize session state
@@ -608,23 +545,31 @@ def main():
     elif selected_view == 'New Record':
         # New Record
         st.markdown("#### Create a new document:")
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
     else:
         st.sidebar.info("No documents found in this container.")

 LOCAL_APP_URL = "https://huggingface.co/spaces/awacke1/AzureCosmosDBUI"

 # OpenAI configuration
+# openai.api_key = os.environ.get("OPENAI_API_KEY")
+# MODEL = "gpt-3.5-turbo"  # Replace with your desired model

 # GitHub configuration
 def download_github_repo(url, local_path):

 # Cosmos DB functions
 def insert_record(container, record):
+    # Inserting a record into the Cosmos - hope we don't disturb any aliens!
     try:
         container.create_item(body=record)
         return True, "Record inserted successfully!"
         return False, f"An unexpected error occurred: {str(e)}"

 def update_record(container, updated_record):
+    # Updating a record - giving it a cosmic makeover!
     try:
         container.upsert_item(body=updated_record)
         return True, f"Record with id {updated_record['id']} successfully updated."
         return False, f"An unexpected error occurred: {traceback.format_exc()}"

 def delete_record(container, name, id):
+    # Deleting a record - sending it into the cosmic void!
     try:
         container.delete_item(item=id, partition_key=id)
         return True, f"Successfully deleted record with name: {name} and id: {id}"

 # Function to archive current container
 def archive_current_container(database_name, container_name, client):
+    # Archiving the entire container - time to pack up the stars!
     try:
         base_dir = "./cosmos_archive_current_container"
         if os.path.exists(base_dir):
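Only the opening lines of the archive routine appear in the diff. The shape it implies (dump every document under the base directory, then zip it) might look like the sketch below; this is a guess at the general pattern, not the app's actual code.

import os, json, shutil

def archive_container_sketch(container, database_name, container_name):
    base_dir = "./cosmos_archive_current_container"
    out_dir = os.path.join(base_dir, database_name, container_name)
    os.makedirs(out_dir, exist_ok=True)
    for item in container.read_all_items():  # iterate over every document in the container
        with open(os.path.join(out_dir, f"{item['id']}.json"), "w") as f:
            json.dump(item, f, indent=2)
    return shutil.make_archive("cosmos_archive", "zip", base_dir)  # path of the resulting .zip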
         return f"An error occurred while archiving data: {str(e)}"


+# Helper to extract hyperlinks
 def extract_hyperlinks(responses):
+    # Extracting hyperlinks - connecting the dots across the universe!
     hyperlinks = []
     for response in responses:
         parsed_response = json.loads(response)
         hyperlinks.extend(links)
     return hyperlinks

+# Helper to format text with line numbers
 def format_with_line_numbers(text):
+    # Formatting text with line numbers - organizing the cosmos one line at a time!
     lines = text.splitlines()
     formatted_text = '\n'.join(f"{i+1}: {line}" for i, line in enumerate(lines))
     return formatted_text

+# Save responses to Cosmos DB
 def save_to_cosmos_db(query, response1, response2):
+    # Saving responses to Cosmos DB - because even the cosmos needs backups!
     cosmos_container = st.session_state.get("cosmos_container")
     if cosmos_container:
         record = {
+            "id": generate_unique_id(),
             "query": query,
             "response1": json.loads(response1),
             "response2": json.loads(response2)
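Note that save_to_cosmos_db runs json.loads on both response arguments, so callers must pass JSON strings, not Python objects; with the new "id" field the stored document ends up roughly like this (illustrative values):

save_to_cosmos_db(
    query="What is RAG?",
    response1=json.dumps({"answer": "..."}),   # must be a JSON string or json.loads will raise
    response2=json.dumps({"summary": "..."}),
)
# stored document (sketch):
# {"id": "<generate_unique_id() value>", "query": "What is RAG?",
#  "response1": {"answer": "..."}, "response2": {"summary": "..."}}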
     else:
         st.error("Cosmos DB is not initialized.")

+# Search Glossary function
 def search_glossary(query):
+    # Searching the glossary - uncovering secrets of the universe!
     st.markdown(f"### Search Glossary for: `{query}`")

     # Dropdown for model selection
     database_options = ['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)']
     database_choice = st.selectbox('Select Database', options=database_options, index=0)

     # Run Button with Emoji
     if st.button("Run"):
+        # We have a query! Let's process it!
         client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

+        # Rest of the code for processing the query...

+# Function to process text input
 def process_text(text_input):
+    # Processing text input - translating human words into cosmic signals!
     if text_input:
         if 'messages' not in st.session_state:
             st.session_state.messages = []

             st.markdown(text_input)

         with st.chat_message("assistant"):
             search_glossary(text_input)

+# Function to generate a filename
 def generate_filename(text, file_type):
     # Generate a filename based on the text input
     safe_text = "".join(c if c.isalnum() or c in (' ', '.', '_') else '_' for c in text)
     filename = f"{safe_text}.{file_type}"
     return filename

+# Function to extract markdown title
 def extract_markdown_title(content):
+    # Extracting markdown title - finding the headline in the cosmic news!
     lines = content.splitlines()
     for line in lines:
         if line.startswith('#'):

 # Function to create and save a file
 def create_and_save_file(content, file_type="md", prompt=None, is_image=False, should_save=True):
+    # Creating and saving a file - capturing cosmic wisdom!
     if not should_save:
         return None

     # Step 1: Generate filename based on the prompt or content
     filename = generate_filename(prompt if prompt else content, file_type)

+    # Step 2: If it's a markdown file, check if it has a title
     if file_type == "md":
         title_from_content = extract_markdown_title(content)
         if title_from_content:

     return filename

+# Function to insert an auto-generated record
+def insert_auto_generated_record(container):
+    # Automatically generating a record and inserting it into Cosmos DB!
+    try:
+        # Generate a unique id
+        new_id = generate_unique_id()
+        # Create a sample JSON document
+        new_doc = {
+            'id': new_id,
+            'name': f'Sample Name {new_id[:8]}',
+            'description': 'This is a sample auto-generated description.',
+            'timestamp': datetime.utcnow().isoformat()
+        }
+        # Insert the document
+        container.create_item(body=new_doc)
+        return True, f"Record inserted successfully with id: {new_id}"
+    except exceptions.CosmosHttpResponseError as e:
+        return False, f"HTTP error occurred: {str(e)}"
+    except Exception as e:
+        return False, f"An unexpected error occurred: {str(e)}"
+
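The new helper leans on datetime.utcnow() and exceptions.CosmosHttpResponseError, so it assumes imports along these lines already sit at the top of app.py (they are not visible in this diff):

from datetime import datetime
from azure.cosmos import exceptions  # provides exceptions.CosmosHttpResponseError used above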
+# Main function
 def main():
+    # Let's modify the main app to be more fun!
     st.title("Git Cosmos - Azure Cosmos DB and Github Agent")

     # Initialize session state
     elif selected_view == 'New Record':
         # New Record
         st.markdown("#### Create a new document:")
+        if st.button("Insert Auto-Generated Record"):
+            success, message = insert_auto_generated_record(container)
+            if success:
+                st.success(message)
+                st.rerun()
+            else:
+                st.error(message)
+        else:
+            new_id = st.text_input("ID", value=generate_unique_id(), key='new_id')
+            new_doc_str = st.text_area("Document Content (in JSON format)", value='{}', height=300)
+            if st.button("Create New Document"):
+                try:
+                    new_doc = json.loads(new_doc_str)
+                    new_doc['id'] = new_id  # Use the provided ID
+                    success, message = insert_record(container, new_doc)
+                    if success:
+                        st.success(f"New document created with id: {new_doc['id']}")
+                        st.session_state.selected_document_id = new_doc['id']
+                        # Switch to 'Show as Edit and Save' mode
+                        st.rerun()
+                    else:
+                        st.error(message)
+                except json.JSONDecodeError as e:
+                    st.error(f"Invalid JSON: {str(e)}")
+
     else:
         st.sidebar.info("No documents found in this container.")

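On the manual path, whatever is typed into the text area is parsed with json.loads and the ID field then overwrites any id inside it. A small example of input that would pass validation (field names other than id are entirely up to the user):

new_doc_str = '''{
    "name": "My note",
    "description": "Created through the New Record form"
}'''
# after json.loads plus the id override, insert_record receives:
# {"name": "My note", "description": "Created through the New Record form", "id": "<value from the ID box>"}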