Update app.py
app.py
CHANGED
@@ -856,11 +856,10 @@ def call_cohere_api(payload, api_key_override=None):
         temperature = payload.get("temperature", 0.7)
         max_tokens = payload.get("max_tokens", 1000)

-        # Create chat completion
-        # The ClientV2's chat method expects 'messages' parameter, not 'message'
+        # Create chat completion
         response = client.chat(
             model=model,
-            messages=messages,
+            messages=messages,
             temperature=temperature,
             max_tokens=max_tokens
         )
@@ -870,6 +869,72 @@ def call_cohere_api(payload, api_key_override=None):
         logger.error(f"Cohere API error: {str(e)}")
         raise e

+def extract_ai_response(result, provider):
+    """Extract AI response based on provider format"""
+    try:
+        if provider == "OpenRouter":
+            if isinstance(result, dict):
+                if "choices" in result and len(result["choices"]) > 0:
+                    if "message" in result["choices"][0]:
+                        message = result["choices"][0]["message"]
+                        if message.get("reasoning") and not message.get("content"):
+                            reasoning = message.get("reasoning")
+                            lines = reasoning.strip().split('\n')
+                            for line in lines:
+                                if line and not line.startswith('I should') and not line.startswith('Let me'):
+                                    return line.strip()
+                            for line in lines:
+                                if line.strip():
+                                    return line.strip()
+                        return message.get("content", "")
+                    elif "delta" in result["choices"][0]:
+                        return result["choices"][0]["delta"].get("content", "")
+
+        elif provider == "OpenAI":
+            if hasattr(result, "choices") and len(result.choices) > 0:
+                return result.choices[0].message.content
+
+        elif provider == "HuggingFace":
+            return result.get("generated_text", "")
+
+        elif provider == "Groq":
+            if hasattr(result, "choices") and len(result.choices) > 0:
+                return result.choices[0].message.content
+
+        elif provider == "Cohere":
+            # Specific handling for Cohere's response format
+            if hasattr(result, "message") and hasattr(result.message, "content"):
+                # Extract text from content items
+                text_content = ""
+                for content_item in result.message.content:
+                    if hasattr(content_item, "text") and content_item.text:
+                        text_content += content_item.text
+                return text_content
+            else:
+                return "No response content from Cohere"
+
+        elif provider == "Together":
+            if hasattr(result, "choices") and len(result.choices) > 0:
+                return result.choices[0].message.content
+
+        elif provider == "OVH":
+            if isinstance(result, dict) and "choices" in result and len(result["choices"]) > 0:
+                return result["choices"][0]["message"]["content"]
+
+        elif provider == "Cerebras":
+            if isinstance(result, dict) and "choices" in result and len(result["choices"]) > 0:
+                return result["choices"][0]["message"]["content"]
+
+        elif provider == "GoogleAI":
+            if isinstance(result, dict) and "choices" in result and len(result["choices"]) > 0:
+                return result["choices"][0]["message"]["content"]
+
+        logger.error(f"Unexpected response structure from {provider}: {result}")
+        return f"Error: Could not extract response from {provider} API result"
+    except Exception as e:
+        logger.error(f"Error extracting AI response: {str(e)}")
+        return f"Error: {str(e)}"
+
 def call_together_api(payload, api_key_override=None):
     """Make a call to Together API with error handling"""
     try:
@@ -1088,63 +1153,6 @@ def call_googleai_api(payload, api_key_override=None):
         logger.error(f"Google AI API error: {str(e)}")
         raise e

-def extract_ai_response(result, provider):
-    """Extract AI response based on provider format"""
-    try:
-        if provider == "OpenRouter":
-            if isinstance(result, dict):
-                if "choices" in result and len(result["choices"]) > 0:
-                    if "message" in result["choices"][0]:
-                        message = result["choices"][0]["message"]
-                        if message.get("reasoning") and not message.get("content"):
-                            reasoning = message.get("reasoning")
-                            lines = reasoning.strip().split('\n')
-                            for line in lines:
-                                if line and not line.startswith('I should') and not line.startswith('Let me'):
-                                    return line.strip()
-                            for line in lines:
-                                if line.strip():
-                                    return line.strip()
-                        return message.get("content", "")
-                    elif "delta" in result["choices"][0]:
-                        return result["choices"][0]["delta"].get("content", "")
-
-        elif provider == "OpenAI":
-            if hasattr(result, "choices") and len(result.choices) > 0:
-                return result.choices[0].message.content
-
-        elif provider == "HuggingFace":
-            return result.get("generated_text", "")
-
-        elif provider == "Groq":
-            if hasattr(result, "choices") and len(result.choices) > 0:
-                return result.choices[0].message.content
-
-        elif provider == "Cohere":
-            if hasattr(result, "text"):
-                return result.text
-
-        elif provider == "Together":
-            if hasattr(result, "choices") and len(result.choices) > 0:
-                return result.choices[0].message.content
-
-        elif provider == "OVH":
-            if isinstance(result, dict) and "choices" in result and len(result["choices"]) > 0:
-                return result["choices"][0]["message"]["content"]
-
-        elif provider == "Cerebras":
-            if isinstance(result, dict) and "choices" in result and len(result["choices"]) > 0:
-                return result["choices"][0]["message"]["content"]
-
-        elif provider == "GoogleAI":
-            if isinstance(result, dict) and "choices" in result and len(result["choices"]) > 0:
-                return result["choices"][0]["message"]["content"]
-
-        logger.error(f"Unexpected response structure from {provider}: {result}")
-        return f"Error: Could not extract response from {provider} API result"
-    except Exception as e:
-        logger.error(f"Error extracting AI response: {str(e)}")
-        return f"Error: {str(e)}"

 # ==========================================================
 # STREAMING HANDLERS
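For reference, a minimal usage sketch of the relocated extract_ai_response with the new Cohere branch. It assumes app.py exposes extract_ai_response at module level (as in this diff); the SimpleNamespace objects below are illustrative stand-ins for a Cohere ClientV2 chat response (message.content as a list of items carrying .text), not the real cohere response classes.

from types import SimpleNamespace

from app import extract_ai_response  # assumption: app.py is importable as a module

# Mock of a ClientV2-style chat response: message.content is a list of text items.
mock_cohere_result = SimpleNamespace(
    message=SimpleNamespace(
        content=[SimpleNamespace(text="Hello "), SimpleNamespace(text="world!")]
    )
)

print(extract_ai_response(mock_cohere_result, "Cohere"))  # "Hello world!"
print(extract_ai_response(SimpleNamespace(), "Cohere"))   # "No response content from Cohere"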