switch to gpt-4-turbo by default
Files changed:
- .env (+1, -1)
- README.md (+2, -2)
- src/app/interface/settings-dialog/defaultSettings.ts (+1, -1)
- src/app/queries/predictWithOpenAI.ts (+1, -1)
.env

@@ -80,7 +80,7 @@ LLM_GROQ_API_MODEL="mixtral-8x7b-32768"
 
 # If you decide to use OpenAI for the LLM engine
 LLM_OPENAI_API_BASE_URL="https://api.openai.com/v1"
-LLM_OPENAI_API_MODEL="gpt-4"
+LLM_OPENAI_API_MODEL="gpt-4-turbo"
 
 # If you decide to use Anthropic (eg. Claude) for the LLM engine
 # https://docs.anthropic.com/claude/docs/models-overview
README.md

@@ -55,7 +55,7 @@ Language model config (depending on the LLM engine you decide to use):
 - `LLM_HF_INFERENCE_ENDPOINT_URL`: "<use your own>"
 - `LLM_HF_INFERENCE_API_MODEL`: "HuggingFaceH4/zephyr-7b-beta"
 - `LLM_OPENAI_API_BASE_URL`: "https://api.openai.com/v1"
-- `LLM_OPENAI_API_MODEL`: "gpt-4"
+- `LLM_OPENAI_API_MODEL`: "gpt-4-turbo"
 - `LLM_GROQ_API_MODEL`: "mixtral-8x7b-32768"
 - `LLM_ANTHROPIC_API_MODEL`: "claude-3-opus-20240229"
 
@@ -123,7 +123,7 @@ LLM_ENGINE="OPENAI"
 # default openai api base url is: https://api.openai.com/v1
 LLM_OPENAI_API_BASE_URL="A custom OpenAI API Base URL if you have some special privileges"
 
-LLM_OPENAI_API_MODEL="gpt-4-turbo-preview"
+LLM_OPENAI_API_MODEL="gpt-4-turbo"
 
 AUTH_OPENAI_API_KEY="Your own OpenAI API Key"
 ```
src/app/interface/settings-dialog/defaultSettings.ts

@@ -15,7 +15,7 @@ export const defaultSettings: Settings = {
   replicateApiModelTrigger: "",
   openaiApiKey: "",
   openaiApiModel: "dall-e-3",
-  openaiApiLanguageModel: "gpt-4-turbo-preview",
+  openaiApiLanguageModel: "gpt-4-turbo",
   groqApiKey: "",
   groqApiLanguageModel: "mixtral-8x7b-32768",
   anthropicApiKey: "",
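For context on how a default like this is usually consumed, here is a minimal sketch of merging `defaultSettings` with user overrides from the settings dialog. The abbreviated `Settings` shape and the `resolveSettings` helper are illustrative assumptions, not the repository's actual code.

```ts
// Minimal sketch (hypothetical, not the repository's actual code): how a
// default such as openaiApiLanguageModel: "gpt-4-turbo" typically interacts
// with user-provided overrides.
interface Settings {
  openaiApiKey: string
  openaiApiModel: string
  openaiApiLanguageModel: string
}

const defaultSettings: Settings = {
  openaiApiKey: "",
  openaiApiModel: "dall-e-3",
  openaiApiLanguageModel: "gpt-4-turbo", // new default introduced by this commit
}

// A partial config (e.g. from the settings dialog) only overrides the fields
// it sets; everything else falls back to the defaults.
function resolveSettings(overrides: Partial<Settings>): Settings {
  return { ...defaultSettings, ...overrides }
}

// Example: the language model stays "gpt-4-turbo" unless explicitly overridden.
const settings = resolveSettings({ openaiApiKey: "sk-..." })
console.log(settings.openaiApiLanguageModel) // "gpt-4-turbo"
```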
src/app/queries/predictWithOpenAI.ts

@@ -18,7 +18,7 @@ export async function predict({
   const openaiApiModel = `${
     llmVendorConfig.modelId ||
     process.env.LLM_OPENAI_API_MODEL ||
-    "gpt-4-turbo-preview"
+    "gpt-4-turbo"
   }`
 
   const openaiApiBaseUrl = `${process.env.LLM_OPENAI_API_BASE_URL || "https://api.openai.com/v1"}`
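To make the fallback concrete, here is a minimal sketch assuming the resolved model and base URL feed the official `openai` SDK. The rest of `predict` is not shown in this diff, so the client construction and prompt handling below are illustrative assumptions rather than the repository's implementation.

```ts
// Minimal sketch (assumptions, not the actual predict() body): using the
// resolved model and base URL with the official "openai" SDK.
import OpenAI from "openai"

async function predictSketch(prompt: string): Promise<string> {
  // Same fallback chain as the diff above: an explicit vendor model wins,
  // then the LLM_OPENAI_API_MODEL env var, then the new "gpt-4-turbo" default.
  const openaiApiModel = process.env.LLM_OPENAI_API_MODEL || "gpt-4-turbo"
  const openaiApiBaseUrl = process.env.LLM_OPENAI_API_BASE_URL || "https://api.openai.com/v1"

  const client = new OpenAI({
    apiKey: process.env.AUTH_OPENAI_API_KEY,
    baseURL: openaiApiBaseUrl,
  })

  const completion = await client.chat.completions.create({
    model: openaiApiModel,
    messages: [{ role: "user", content: prompt }],
  })

  // content can be null, so fall back to an empty string
  return completion.choices[0]?.message?.content || ""
}
```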