new_model
Files changed:
- analyzer.py +1 -1
- chatbot_page.py +2 -2
analyzer.py CHANGED
@@ -19,7 +19,7 @@ def analyze_code(code: str) -> str:
         "{\n 'strength': '...', \n 'weaknesses': '...', \n 'speciality': '...', \n 'relevance rating': '...'\n}"
     )
     response = client.chat.completions.create(
-        model="
+        model="Orion-zhen/Qwen2.5-Coder-7B-Instruct-AWQ", # Updated model
         messages=[
             {"role": "system", "content": system_prompt},
             {"role": "user", "content": code}
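For reference, the updated call site in analyzer.py would look roughly like the sketch below. This is a hedged reconstruction, not the actual file: the OpenAI-compatible client setup (base_url, api_key) and the leading instruction of system_prompt are assumptions; only the JSON schema string, the model name, and the messages structure come from the diff.

# Minimal sketch of the updated analyze_code call (client setup is an assumption).
from openai import OpenAI

# Assumption: an OpenAI-compatible endpoint (e.g. a local vLLM server) hosts the model.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="unused")

def analyze_code(code: str) -> str:
    system_prompt = (
        "Analyze the given code and reply strictly in this JSON form:\n"  # assumed wording
        "{\n 'strength': '...', \n 'weaknesses': '...', \n 'speciality': '...', \n 'relevance rating': '...'\n}"
    )
    response = client.chat.completions.create(
        model="Orion-zhen/Qwen2.5-Coder-7B-Instruct-AWQ",  # Updated model
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": code}
        ],
    )
    return response.choices[0].message.content  # assumed return shape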
chatbot_page.py CHANGED
@@ -28,7 +28,7 @@ def chat_with_user(user_message, history):
         messages.append({"role": "assistant", "content": msg[1]})
     messages.append({"role": "user", "content": user_message})
     response = client.chat.completions.create(
-        model="
+        model="Orion-zhen/Qwen2.5-Coder-7B-Instruct-AWQ",
         messages=messages,
         max_tokens=256,
         temperature=0.7
@@ -53,7 +53,7 @@ def extract_keywords_from_conversation(history):
         "Conversation:\n" + conversation + "\n\nExtract about 5 keywords for Hugging Face repo search."
     )
     response = client.chat.completions.create(
-        model="
+        model="Orion-zhen/Qwen2.5-Coder-7B-Instruct-AWQ",
         messages=[
             {"role": "system", "content": system_prompt},
             {"role": "user", "content": user_prompt}
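Both call sites in chatbot_page.py follow the same pattern. Below is a rough sketch of chat_with_user under the same assumed client setup; the history-walking loop and the system message wording are assumptions, while the assistant/user appends, the model name, max_tokens and temperature come from the diff.

# Sketch of chat_with_user with the updated model.
# Assumption: history is a list of (user_message, assistant_reply) pairs, and
# `client` is the OpenAI-compatible client shown in the analyzer.py sketch.
def chat_with_user(user_message, history):
    messages = [{"role": "system", "content": "You are a helpful assistant."}]  # assumed system prompt
    for msg in history:
        messages.append({"role": "user", "content": msg[0]})
        messages.append({"role": "assistant", "content": msg[1]})
    messages.append({"role": "user", "content": user_message})
    response = client.chat.completions.create(
        model="Orion-zhen/Qwen2.5-Coder-7B-Instruct-AWQ",
        messages=messages,
        max_tokens=256,
        temperature=0.7
    )
    return response.choices[0].message.content  # assumed return shape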