Back to DB-GPT

DeepSeek Proxy LLM Configuration

docs/docs/config-reference/llm/deepseek_deepseekdeploymodelparameters_194cbd.mdx

Version: 0.8.0 · Size: 2.7 KB
Original Source

import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";

<ConfigDetail config={{ "name": "DeepSeekDeployModelParameters", "description": "Deepseek proxy LLM configuration.", "documentationUrl": "https://api-docs.deepseek.com/", "parameters": [ { "name": "name", "type": "string", "required": true, "description": "The name of the model." }, { "name": "backend", "type": "string", "required": false, "description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name." }, { "name": "provider", "type": "string", "required": false, "description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')", "defaultValue": "proxy/deepseek" }, { "name": "verbose", "type": "boolean", "required": false, "description": "Show verbose output.", "defaultValue": "False" }, { "name": "concurrency", "type": "integer", "required": false, "description": "Model concurrency limit", "defaultValue": "100" }, { "name": "prompt_template", "type": "string", "required": false, "description": "Prompt template. If None, the prompt template is automatically determined from model. Just for local deployment." }, { "name": "context_length", "type": "integer", "required": false, "description": "The context length of the OpenAI API. If None, it is determined by the model." }, { "name": "reasoning_model", "type": "boolean", "required": false, "description": "Whether the model is a reasoning model. If None, it is automatically determined from model." 
}, { "name": "api_base", "type": "string", "required": false, "description": "The base url of the DeepSeek API.", "defaultValue": "${env:DEEPSEEK_API_BASE:-https://api.deepseek.com/v1}" }, { "name": "api_key", "type": "string", "required": false, "description": "The API key of the DeepSeek API.", "defaultValue": "${env:DEEPSEEK_API_KEY}" }, { "name": "api_type", "type": "string", "required": false, "description": "The type of the OpenAI API, if you use Azure, it can be: azure" }, { "name": "api_version", "type": "string", "required": false, "description": "The version of the OpenAI API." }, { "name": "http_proxy", "type": "string", "required": false, "description": "The http or https proxy to use openai" } ] }} />