Update backup11.app.py
backup11.app.py: +36 -40
@@ -54,7 +54,7 @@ LOCAL_APP_URL = "https://huggingface.co/spaces/awacke1/AzureCosmosDBUI"
 CosmosDBUrl = 'https://portal.azure.com/#@AaronCWackergmail.onmicrosoft.com/resource/subscriptions/003fba60-5b3f-48f4-ab36-3ed11bc40816/resourceGroups/datasets/providers/Microsoft.DocumentDB/databaseAccounts/acae-afd/dataExplorer'
 
 # 🤖 Anthropic configuration - Teaching machines to be more human (and funnier)
-
+anthropicclient = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
 
 # 🧠 Initialize session state - Because even apps need a good memory
 if "chat_history" not in st.session_state:
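Note: the added line initializes the Anthropic SDK client used by the rest of the app. A minimal sketch of how such a client is typically exercised follows; the model name and prompt are illustrative assumptions, not taken from this commit.

import os
import anthropic

# Same initialization as the added line above.
anthropicclient = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))

# Hypothetical request; model and prompt are assumptions for illustration only.
message = anthropicclient.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Summarize this arXiv abstract in two sentences."}],
)
print(message.content[0].text)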
@@ -311,22 +311,33 @@ def archive_current_container(database_name, container_name, client):
     except Exception as e:
         return f"An error occurred while archiving data: {str(e)} 😢"
 
+def gen_AI_IO_filename(display_query, output):
+    # Get current time in Central Time Zone with milliseconds
+    now_central = datetime.now(pytz.timezone("America/Chicago"))
+    timestamp = now_central.strftime("%Y-%m-%d-%I-%M-%S-%f-%p")
+
+    # Limit components to prevent excessive filename length
+    display_query = display_query[:50]  # Truncate display_query to 50 chars
+    output_snippet = re.sub(r'[^A-Za-z0-9]+', '_', output[:100])  # Truncate output_snippet to 100 chars
+
+    filename = f"{timestamp} - {display_query} - {output_snippet}.md"
+    return filename
 
 # 🔍 Search glossary - Finding needles in digital haystacks
 def search_glossary(query):
     st.markdown(f"### 🔍 SearchGlossary for: {query}")
-
-
-    #model_choice = st.selectbox('🧠 Select LLM Model', options=model_options, index=1)
-    # Dropdown for database selection
+    model_options = ['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2']
+    model_choice = st.selectbox('🧠 Select LLM Model', options=model_options, index=1, key=f"model_choice_{id(query)}")
     database_options = ['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)']
-
+    database_choice = st.selectbox('🔍 Select Database', options=database_options, index=0, key=f"database_choice_{id(query)}")
+
     # 🕵️‍♂️ Searching the glossary for: query
     all_results = ""
-    #
-
+    # Limit the query display to 80 characters
+    display_query = query[:80] + "..." if len(query) > 80 else query
+    st.markdown(f"🕵️‍♂️ Running ArXiV AI Analysis with Query: {display_query} - ML model: {model_choice} and Option: {database_options}")
 
-    # 🔍
+    # 📚 ArXiV RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
     # 📚 ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /ask_llm
     result = client.predict(
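Note: the new gen_AI_IO_filename helper is self-contained and can be tested outside Streamlit. A minimal sketch with an illustrative query and output; the printed filename is an example, actual timestamps will differ, and %f actually yields microseconds despite the hunk's "milliseconds" comment.

import re
from datetime import datetime
import pytz

def gen_AI_IO_filename(display_query, output):
    # America/Chicago timestamp; %f yields microsecond precision
    now_central = datetime.now(pytz.timezone("America/Chicago"))
    timestamp = now_central.strftime("%Y-%m-%d-%I-%M-%S-%f-%p")
    display_query = display_query[:50]
    output_snippet = re.sub(r'[^A-Za-z0-9]+', '_', output[:100])
    return f"{timestamp} - {display_query} - {output_snippet}.md"

print(gen_AI_IO_filename("mixture of experts", "Mixtral-8x7B routes tokens to sparse experts"))
# e.g. 2024-05-01-03-22-41-123456-PM - mixture of experts - Mixtral_8x7B_routes_tokens_to_sparse_experts.md

One design note: the new widget keys are built from id(query), which is unique per Python object but can change on every Streamlit rerun; hashing the query text would give a stable key.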
@@ -351,11 +362,6 @@ def search_glossary(query):
     st.markdown(result2)
     #st.code(result2, language="python", line_numbers=True)
 
-
-
-
-
-
     # 📚 ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /update_with_rag_md
     response2 = client.predict(
         message=query,  # str in 'parameter_13' Textbox component
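Note: both client.predict argument lists are truncated in this diff, so the remote Space's full signatures are not visible here. Rather than guessing parameter names, gradio_client can report them directly; a small sketch (assumes the Space is running):

from gradio_client import Client

client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
# Prints every endpoint (e.g. /ask_llm, /update_with_rag_md) along with
# its parameter names and return types.
client.view_api()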
@@ -373,33 +379,23 @@ def search_glossary(query):
     #st.code(response2[1], language="python", line_numbers=True, wrap_lines=True)
 
 
-    # Persist AI Results to Markdown Files
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    #st.session_state.chat_history.append({"assistant": query, "ArXiV": response2[0]})
-    except:
-        st.markdown('3 error')
-    try:
-        filename = generate_filename(response2[1], "md")
-        create_file(filename, query, response2[1])
-        #st.session_state.chat_history.append({"assistant": query, "ArXiV": response2[1]})
-    except:
-        st.markdown('4 error')
+    # ✅ Persist AI Results to Markdown Files
+    filename = gen_AI_IO_filename(display_query, result)
+    create_file(filename, query, result)
+    st.markdown(f"✅ File saved as: `{filename}`")
+
+    filename = gen_AI_IO_filename(display_query, result2)
+    create_file(filename, query, result2)
+    st.markdown(f"✅ File saved as: `{filename}`")
+
+    filename = gen_AI_IO_filename(display_query, response2[0])
+    create_file(filename, query, response2[0])
+    st.markdown(f"✅ File saved as: `{filename}`")
+
+    filename = gen_AI_IO_filename(display_query, response2[1])
+    create_file(filename, query, response2[1])
+    st.markdown(f"✅ File saved as: `{filename}`")
 
-
     return result, result2, response2
 
 
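Note: create_file is called throughout the new persistence block but is not defined in this diff. A minimal plausible implementation, offered only as an assumption about what the repo's helper does:

def create_file(filename, prompt, response):
    # Assumed behavior: persist the prompt and the AI response as one Markdown file.
    with open(filename, "w", encoding="utf-8") as f:
        f.write(prompt + "\n\n" + response)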