Update app.py
app.py CHANGED

@@ -1,9 +1,16 @@
+import os
+import requests
 from langchain.chains import SequentialChain, LLMChain
 from langchain.prompts import PromptTemplate
 from langchain_groq import ChatGroq
+from langchain.document_loaders import PDFPlumberLoader
+from langchain_experimental.text_splitter import SemanticChunker
+from langchain_huggingface import HuggingFaceEmbeddings
+from langchain_chroma import Chroma
 
+# Set API Keys
+os.environ["GROQ_API_KEY"] = st.secrets.get("GROQ_API_KEY", "")
+
+# Load LLM models
 llm_judge = ChatGroq(model="deepseek-r1-distill-llama-70b")
 rag_llm = ChatGroq(model="mixtral-8x7b-32768")
-
-llm_judge.verbose = True
-rag_llm.verbose = True
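Note on the new code: the added line reads GROQ_API_KEY via st.secrets, which requires streamlit to be imported as st; no such import is visible in this hunk, so the module would raise a NameError at import time unless the import exists elsewhere in the file. The hunk also only adds the document-loading, chunking, embedding, and vector-store imports without showing their use. Below is a minimal sketch, not part of this commit, of how those newly imported pieces are typically wired together; the PDF path, chunking call, and retriever settings are illustrative assumptions.

    import streamlit as st  # needed for the st.secrets.get(...) call above
    from langchain.document_loaders import PDFPlumberLoader
    from langchain_experimental.text_splitter import SemanticChunker
    from langchain_huggingface import HuggingFaceEmbeddings
    from langchain_chroma import Chroma

    # Load a PDF into documents (path is a hypothetical example)
    docs = PDFPlumberLoader("example.pdf").load()

    # Embeddings drive both the semantic chunking and the vector store
    embeddings = HuggingFaceEmbeddings()
    chunks = SemanticChunker(embeddings).split_documents(docs)

    # Index the chunks in Chroma and expose a retriever for the RAG chain
    vector_store = Chroma.from_documents(chunks, embeddings)
    retriever = vector_store.as_retriever(search_kwargs={"k": 3})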