Update app.py
app.py (CHANGED)
@@ -3,13 +3,19 @@ from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 from llama_index.legacy.callbacks import CallbackManager
 from llama_index.llms.openai_like import OpenAILike
+import os
+
+
+
 
 # Create an instance of CallbackManager
 callback_manager = CallbackManager()
 
 api_base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
 model = "internlm2.5-latest"
-
+
+api_key = os.getenv('token')
+#api_key = "eyJ0eXBlIjoiSldUIiwiYWxnIjoiSFM1MTIifQ.eyJqdGkiOiIxNzAwMzA3OCIsInJvbCI6IlJPTEVfUkVHSVNURVIiLCJpc3MiOiJPcGVuWExhYiIsImlhdCI6MTczNjA2MTg4OCwiY2xpZW50SWQiOiJlYm1ydm9kNnlvMG5semFlazF5cCIsInBob25lIjoiMTg0MDY1MDk1NTgiLCJ1dWlkIjoiZmJlY2JhMjItMWViNi00NWExLTkyMTMtNjM1MDEwOGYyMDMwIiwiZW1haWwiOiIiLCJleHAiOjE3NTE2MTM4ODh9.VO6K_sCFGq_srrXX5Hih47Lcv9nWdmPCVwg1fMrw1zossEvlmTwjyGrhSSr5oU4T9tr_dTBzWQHlp-0IV5kBfA"
 
 # api_base_url = "https://api.siliconflow.cn/v1"
 # model = "internlm/internlm2_5-7b-chat"
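
The commit swaps a hardcoded API key for one read from the environment: api_key = os.getenv('token') picks up the Space secret stored under the name "token", so the credential no longer lives in the source file. The rest of app.py is not shown in this hunk, so the following is only a minimal sketch, under the assumption of the usual llama_index wiring, of how api_base_url, model, and api_key would feed into the OpenAILike client; the is_chat_model flag and the Settings.llm assignment are assumptions, not part of the diff.

import os

from llama_index.core import Settings
from llama_index.llms.openai_like import OpenAILike

# Values taken from the diff above.
api_base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
model = "internlm2.5-latest"
api_key = os.getenv('token')  # Space secret exposed as an environment variable

# Assumed wiring (not shown in the hunk): point llama_index at the
# OpenAI-compatible InternLM endpoint using the values configured above.
llm = OpenAILike(
    model=model,
    api_base=api_base_url,
    api_key=api_key,
    is_chat_model=True,  # assumption: the puyu endpoint serves a chat model
)
Settings.llm = llm  # assumption: the app registers this LLM globally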