### What problem does this PR solve?

Feat: Support vLLM #4316

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
parent fefea3a2a5 · commit d9dd1171a3

web/src/assets/svg/llm/vllm.svg (new file, 59 lines, 2.1 KiB)
@@ -0,0 +1,59 @@
<svg version="1.2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 384 192" width="384" height="192">
<title>vllm</title>
<defs>
<clipPath clipPathUnits="userSpaceOnUse" id="cp1">
<path d="m0 0h384v192h-384z"/>
</clipPath>
<filter x="-50%" y="-50%" width="200%" height="200%" id="f1"> <feGaussianBlur stdDeviation="2"/> </filter>
</defs>
<style>
.s0 { fill: none }
.s1 { fill: #434343 }
.s2 { fill: #d9d9d9 }
.s3 { filter: url(#f1);fill: #d9d9d9 }
.s4 { filter: url(#f1);fill: #fdb515 }
.s5 { fill: #fdb515 }
.s6 { filter: url(#f1);fill: #30a2ff }
.s7 { fill: #30a2ff }
</style>
<g id="Clip-Path" clip-path="url(#cp1)">
<g>
<path fill-rule="evenodd" class="s0" d="m99.8 12.6h253.4v166.8h-253.4z"/>
<path class="s1" d="m172.7 136.5h-58.9v-93.1h12.4v82.1h46.5zm71.2 0h-58.9v-93.1h12.4v82.1h46.5zm95.3 0h-12.4v-80.2l-25.9 54.6h-7.4l-25.6-54.6v80.2h-11.6v-93.1h16.9l24.8 51.8 24-51.8h17.2z"/>
<path fill-rule="evenodd" class="s2" d="m58.5 134.8h1.9v2.4h-1.9z"/>
<path fill-rule="evenodd" class="s2" d="m59.6 136h1.9v2.3h-1.9z"/>
<g>
<g id="g31e21232314_0_0.1">
<path fill-rule="evenodd" class="s3" d="m59.6 85v55.3l-27.6-55.3z"/>
</g>
</g>
<g id="g31e21232314_0_0.1">
<path fill-rule="evenodd" class="s2" d="m59.6 83v55.3l-27.6-55.3z"/>
</g>
<g>
<g id="g31e21232314_0_0.2">
<path fill-rule="evenodd" class="s3" d="m59.6 140.3h21.8l18.6-70.4-25.6 13.5z"/>
</g>
</g>
<g id="g31e21232314_0_0.2">
<path fill-rule="evenodd" class="s2" d="m59.6 138.3h21.8l18.6-70.4-25.6 13.5z"/>
</g>
<g>
<g id="g31e21232314_0_0.3">
<path fill-rule="evenodd" class="s4" d="m58.5 83.8v55.3l-27.7-55.3z"/>
</g>
</g>
<g id="g31e21232314_0_0.3">
<path fill-rule="evenodd" class="s5" d="m58.5 81.8v55.3l-27.7-55.3z"/>
</g>
<g>
<g id="g31e21232314_0_0.4">
<path fill-rule="evenodd" class="s6" d="m58.5 139.1h21.7l18.6-70.3-25.5 13.4z"/>
</g>
</g>
<g id="g31e21232314_0_0.4">
<path fill-rule="evenodd" class="s7" d="m58.5 137.1h21.7l18.6-70.3-25.5 13.4z"/>
</g>
</g>
</g>
</svg>
@@ -1,4 +1,4 @@
import { IconMap } from '@/constants/setting';
import { IconMap } from '@/constants/llm';
import { cn } from '@/lib/utils';
import Icon, { UserOutlined } from '@ant-design/icons';
import { IconComponentProps } from '@ant-design/icons/lib/components/Icon';
web/src/constants/llm.ts (new file, 106 lines)
@@ -0,0 +1,106 @@
export enum LLMFactory {
  TongYiQianWen = 'Tongyi-Qianwen',
  Moonshot = 'Moonshot',
  OpenAI = 'OpenAI',
  ZhipuAI = 'ZHIPU-AI',
  WenXinYiYan = '文心一言',
  Ollama = 'Ollama',
  Xinference = 'Xinference',
  ModelScope = 'ModelScope',
  DeepSeek = 'DeepSeek',
  VolcEngine = 'VolcEngine',
  BaiChuan = 'BaiChuan',
  Jina = 'Jina',
  MiniMax = 'MiniMax',
  Mistral = 'Mistral',
  AzureOpenAI = 'Azure-OpenAI',
  Bedrock = 'Bedrock',
  Gemini = 'Gemini',
  Groq = 'Groq',
  OpenRouter = 'OpenRouter',
  LocalAI = 'LocalAI',
  StepFun = 'StepFun',
  NVIDIA = 'NVIDIA',
  LMStudio = 'LM-Studio',
  OpenAiAPICompatible = 'OpenAI-API-Compatible',
  Cohere = 'Cohere',
  LeptonAI = 'LeptonAI',
  TogetherAI = 'TogetherAI',
  PerfXCloud = 'PerfXCloud',
  Upstage = 'Upstage',
  NovitaAI = 'novita.ai',
  SILICONFLOW = 'SILICONFLOW',
  PPIO = 'PPIO',
  ZeroOneAI = '01.AI',
  Replicate = 'Replicate',
  TencentHunYuan = 'Tencent Hunyuan',
  XunFeiSpark = 'XunFei Spark',
  BaiduYiYan = 'BaiduYiyan',
  FishAudio = 'Fish Audio',
  TencentCloud = 'Tencent Cloud',
  Anthropic = 'Anthropic',
  VoyageAI = 'Voyage AI',
  GoogleCloud = 'Google Cloud',
  HuggingFace = 'HuggingFace',
  YouDao = 'Youdao',
  BAAI = 'BAAI',
  NomicAI = 'nomic-ai',
  JinaAI = 'jinaai',
  SentenceTransformers = 'sentence-transformers',
  GPUStack = 'GPUStack',
  VLLM = 'VLLM',
}

// Please lowercase the file name
export const IconMap = {
  [LLMFactory.TongYiQianWen]: 'tongyi',
  [LLMFactory.Moonshot]: 'moonshot',
  [LLMFactory.OpenAI]: 'openai',
  [LLMFactory.ZhipuAI]: 'zhipu',
  [LLMFactory.WenXinYiYan]: 'wenxin',
  [LLMFactory.Ollama]: 'ollama',
  [LLMFactory.Xinference]: 'xinference',
  [LLMFactory.ModelScope]: 'modelscope',
  [LLMFactory.DeepSeek]: 'deepseek',
  [LLMFactory.VolcEngine]: 'volc_engine',
  [LLMFactory.BaiChuan]: 'baichuan',
  [LLMFactory.Jina]: 'jina',
  [LLMFactory.MiniMax]: 'chat-minimax',
  [LLMFactory.Mistral]: 'mistral',
  [LLMFactory.AzureOpenAI]: 'azure',
  [LLMFactory.Bedrock]: 'bedrock',
  [LLMFactory.Gemini]: 'gemini',
  [LLMFactory.Groq]: 'groq-next',
  [LLMFactory.OpenRouter]: 'open-router',
  [LLMFactory.LocalAI]: 'local-ai',
  [LLMFactory.StepFun]: 'stepfun',
  [LLMFactory.NVIDIA]: 'nvidia',
  [LLMFactory.LMStudio]: 'lm-studio',
  [LLMFactory.OpenAiAPICompatible]: 'openai-api',
  [LLMFactory.Cohere]: 'cohere',
  [LLMFactory.LeptonAI]: 'lepton-ai',
  [LLMFactory.TogetherAI]: 'together-ai',
  [LLMFactory.PerfXCloud]: 'perfx-cloud',
  [LLMFactory.Upstage]: 'upstage',
  [LLMFactory.NovitaAI]: 'novita-ai',
  [LLMFactory.SILICONFLOW]: 'siliconflow',
  [LLMFactory.PPIO]: 'ppio',
  [LLMFactory.ZeroOneAI]: 'yi',
  [LLMFactory.Replicate]: 'replicate',
  [LLMFactory.TencentHunYuan]: 'hunyuan',
  [LLMFactory.XunFeiSpark]: 'spark',
  [LLMFactory.BaiduYiYan]: 'yiyan',
  [LLMFactory.FishAudio]: 'fish-audio',
  [LLMFactory.TencentCloud]: 'tencent-cloud',
  [LLMFactory.Anthropic]: 'anthropic',
  [LLMFactory.VoyageAI]: 'voyage',
  [LLMFactory.GoogleCloud]: 'google-cloud',
  [LLMFactory.HuggingFace]: 'huggingface',
  [LLMFactory.YouDao]: 'youdao',
  [LLMFactory.BAAI]: 'baai',
  [LLMFactory.NomicAI]: 'nomic-ai',
  [LLMFactory.JinaAI]: 'jina',
  [LLMFactory.SentenceTransformers]: 'sentence-transformers',
  [LLMFactory.GPUStack]: 'gpustack',
  [LLMFactory.VLLM]: 'vllm',
};
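`IconMap` values are the lowercase SVG base names under `web/src/assets/svg/llm/` (hence the new `vllm.svg`). A minimal sketch of how the map is typically consumed when the factory name arrives as a plain string from the backend — the `resolveLlmIconName` helper and its fallback are illustrative assumptions, not part of this PR:

```ts
import { IconMap, LLMFactory } from '@/constants/llm';

// Hypothetical helper (not in this diff): resolve the lowercase SVG name the icon
// component loads from web/src/assets/svg/llm/, with a generic fallback.
export const resolveLlmIconName = (factory: string): string =>
  IconMap[factory as LLMFactory] ?? 'openai-api';

// resolveLlmIconName(LLMFactory.VLLM) === 'vllm'  ->  web/src/assets/svg/llm/vllm.svg
```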
@@ -24,59 +24,6 @@ export enum ProfileSettingRouteKey {
  Logout = 'logout',
}

// Please lowercase the file name
export const IconMap = {
  'Tongyi-Qianwen': 'tongyi',
  Moonshot: 'moonshot',
  OpenAI: 'openai',
  'ZHIPU-AI': 'zhipu',
  文心一言: 'wenxin',
  Ollama: 'ollama',
  Xinference: 'xinference',
  ModelScope: 'modelscope',
  DeepSeek: 'deepseek',
  VolcEngine: 'volc_engine',
  BaiChuan: 'baichuan',
  Jina: 'jina',
  MiniMax: 'chat-minimax',
  Mistral: 'mistral',
  'Azure-OpenAI': 'azure',
  Bedrock: 'bedrock',
  Gemini: 'gemini',
  Groq: 'groq-next',
  OpenRouter: 'open-router',
  LocalAI: 'local-ai',
  StepFun: 'stepfun',
  NVIDIA: 'nvidia',
  'LM-Studio': 'lm-studio',
  'OpenAI-API-Compatible': 'openai-api',
  Cohere: 'cohere',
  LeptonAI: 'lepton-ai',
  TogetherAI: 'together-ai',
  PerfXCloud: 'perfx-cloud',
  Upstage: 'upstage',
  'novita.ai': 'novita-ai',
  SILICONFLOW: 'siliconflow',
  PPIO: 'ppio',
  '01.AI': 'yi',
  Replicate: 'replicate',
  'Tencent Hunyuan': 'hunyuan',
  'XunFei Spark': 'spark',
  BaiduYiyan: 'yiyan',
  'Fish Audio': 'fish-audio',
  'Tencent Cloud': 'tencent-cloud',
  Anthropic: 'anthropic',
  'Voyage AI': 'voyage',
  'Google Cloud': 'google-cloud',
  HuggingFace: 'huggingface',
  Youdao: 'youdao',
  BAAI: 'baai',
  'nomic-ai': 'nomic-ai',
  jinaai: 'jina',
  'sentence-transformers': 'sentence-transformers',
  GPUStack: 'gpustack',
};

export const TimezoneList = [
  'UTC-11\tPacific/Midway',
  'UTC-11\tPacific/Niue',
@@ -6,6 +6,7 @@ import {
  ProfileIcon,
  TeamIcon,
} from '@/assets/icon/Icon';
import { LLMFactory } from '@/constants/llm';
import { UserSettingRouteKey } from '@/constants/setting';
import { MonitorOutlined } from '@ant-design/icons';

@@ -22,17 +23,18 @@ export const UserSettingIconMap = {
export * from '@/constants/setting';

export const LocalLlmFactories = [
  'Ollama',
  'Xinference',
  'LocalAI',
  'LM-Studio',
  'OpenAI-API-Compatible',
  'TogetherAI',
  'Replicate',
  'OpenRouter',
  'HuggingFace',
  'GPUStack',
  'ModelScope',
  LLMFactory.Ollama,
  LLMFactory.Xinference,
  LLMFactory.LocalAI,
  LLMFactory.LMStudio,
  LLMFactory.OpenAiAPICompatible,
  LLMFactory.TogetherAI,
  LLMFactory.Replicate,
  LLMFactory.OpenRouter,
  LLMFactory.HuggingFace,
  LLMFactory.GPUStack,
  LLMFactory.ModelScope,
  LLMFactory.VLLM,
];

export enum TenantRole {
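`LocalLlmFactories` is what routes a provider through the "add the model" flow instead of a bare API-key prompt, and vLLM now joins it. The `isLocalLlmFactory` check used in the model card further down is defined elsewhere in the repo; a minimal sketch of what such a membership test looks like, assuming it simply scans this list:

```ts
// Sketch only — the real isLocalLlmFactory helper lives elsewhere in the codebase
// and may differ; it assumes the LocalLlmFactories constant from the hunk above.
export const isLocalLlmFactory = (factoryName: string): boolean =>
  LocalLlmFactories.some((factory) => factory === factoryName);
```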
@@ -1,4 +1,5 @@
import { IModalManagerChildrenProps } from '@/components/modal-manager';
import { LLMFactory } from '@/constants/llm';
import { useTranslate } from '@/hooks/common-hooks';
import { Form, Input, Modal } from 'antd';
import { useEffect } from 'react';

@@ -18,7 +19,7 @@ type FieldType = {
  group_id?: string;
};

const modelsWithBaseUrl = ['OpenAI', 'Azure-OpenAI'];
const modelsWithBaseUrl = [LLMFactory.OpenAI, LLMFactory.AzureOpenAI];

const ApiKeyModal = ({
  visible,
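`modelsWithBaseUrl` is the list that makes the API-key dialog also ask for a base URL, now expressed with the enum. A rough sketch of that gating, where the `BaseUrlField` component name and the placeholder URL are illustrative assumptions rather than the modal's actual markup:

```tsx
import { Form, Input } from 'antd';
import { LLMFactory } from '@/constants/llm';

const modelsWithBaseUrl = [LLMFactory.OpenAI, LLMFactory.AzureOpenAI];

// Sketch of the gating only; the real ApiKeyModal has more fields and i18n labels.
function BaseUrlField({ llmFactory }: { llmFactory: string }) {
  const showBaseUrl = modelsWithBaseUrl.some((factory) => factory === llmFactory);
  if (!showBaseUrl) {
    return null;
  }
  return (
    <Form.Item label="Base URL" name="base_url">
      <Input placeholder="https://api.openai.com/v1" />
    </Form.Item>
  );
}
```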
@@ -1,6 +1,7 @@
import { ReactComponent as MoreModelIcon } from '@/assets/svg/more-model.svg';
import { LlmIcon } from '@/components/svg-icon';
import { useTheme } from '@/components/theme-provider';
import { LLMFactory } from '@/constants/llm';
import { useSetModalState, useTranslate } from '@/hooks/common-hooks';
import { LlmItem, useSelectLlmList } from '@/hooks/llm-hooks';
import { CloseCircleOutlined, SettingOutlined } from '@ant-design/icons';

@@ -94,14 +95,14 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
        <Button onClick={handleApiKeyClick}>
          <Flex align="center" gap={4}>
            {isLocalLlmFactory(item.name) ||
            item.name === 'VolcEngine' ||
            item.name === 'Tencent Hunyuan' ||
            item.name === 'XunFei Spark' ||
            item.name === 'BaiduYiyan' ||
            item.name === 'Fish Audio' ||
            item.name === 'Tencent Cloud' ||
            item.name === 'Google Cloud' ||
            item.name === 'Azure OpenAI'
            item.name === LLMFactory.VolcEngine ||
            item.name === LLMFactory.TencentHunYuan ||
            item.name === LLMFactory.XunFeiSpark ||
            item.name === LLMFactory.BaiduYiYan ||
            item.name === LLMFactory.FishAudio ||
            item.name === LLMFactory.TencentCloud ||
            item.name === LLMFactory.GoogleCloud ||
            item.name === LLMFactory.AzureOpenAI
              ? t('addTheModel')
              : 'API-Key'}
            <SettingOutlined />
@@ -248,15 +249,15 @@ const UserSettingModel = () => {

  const ModalMap = useMemo(
    () => ({
      Bedrock: showBedrockAddingModal,
      VolcEngine: showVolcAddingModal,
      'Tencent Hunyuan': showHunyuanAddingModal,
      'XunFei Spark': showSparkAddingModal,
      BaiduYiyan: showyiyanAddingModal,
      'Fish Audio': showFishAudioAddingModal,
      'Tencent Cloud': showTencentCloudAddingModal,
      'Google Cloud': showGoogleAddingModal,
      'Azure-OpenAI': showAzureAddingModal,
      [LLMFactory.Bedrock]: showBedrockAddingModal,
      [LLMFactory.VolcEngine]: showVolcAddingModal,
      [LLMFactory.TencentHunYuan]: showHunyuanAddingModal,
      [LLMFactory.XunFeiSpark]: showSparkAddingModal,
      [LLMFactory.BaiduYiYan]: showyiyanAddingModal,
      [LLMFactory.FishAudio]: showFishAudioAddingModal,
      [LLMFactory.TencentCloud]: showTencentCloudAddingModal,
      [LLMFactory.GoogleCloud]: showGoogleAddingModal,
      [LLMFactory.AzureOpenAI]: showAzureAddingModal,
    }),
    [
      showBedrockAddingModal,
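`ModalMap` routes factories that need a dedicated setup dialog to their show-modal callback; factories absent from the map fall back to the plain API-key modal or, for local factories such as the new vLLM, to the shared add-model dialog configured later in the diff. A sketch of that dispatch, with the stub callbacks and the `handleAddOrConfigure` name as assumptions rather than the component's actual identifiers:

```ts
import { LLMFactory } from '@/constants/llm';

// Stand-ins for the useSetModalState() callbacks the real component provides.
const showBedrockAddingModal = () => console.log('open the Bedrock dialog');
const showApiKeyModal = () => console.log('open the generic API-key / add-model flow');

const ModalMap: Partial<Record<LLMFactory, () => void>> = {
  [LLMFactory.Bedrock]: showBedrockAddingModal,
};

// Illustrative handler; the real page wires this through handleApiKeyClick.
const handleAddOrConfigure = (factoryName: string) => {
  const showConfigModal = ModalMap[factoryName as LLMFactory];
  (showConfigModal ?? showApiKeyModal)();
};

handleAddOrConfigure(LLMFactory.Bedrock); // dedicated Bedrock dialog
handleAddOrConfigure(LLMFactory.VLLM); // falls back to the generic flow
```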
@@ -396,63 +397,63 @@ const UserSettingModel = () => {
        hideModal={hideVolcAddingModal}
        onOk={onVolcAddingOk}
        loading={volcAddingLoading}
        llmFactory={'VolcEngine'}
        llmFactory={LLMFactory.VolcEngine}
      ></VolcEngineModal>
      <HunyuanModal
        visible={HunyuanAddingVisible}
        hideModal={hideHunyuanAddingModal}
        onOk={onHunyuanAddingOk}
        loading={HunyuanAddingLoading}
        llmFactory={'Tencent Hunyuan'}
        llmFactory={LLMFactory.TencentHunYuan}
      ></HunyuanModal>
      <GoogleModal
        visible={GoogleAddingVisible}
        hideModal={hideGoogleAddingModal}
        onOk={onGoogleAddingOk}
        loading={GoogleAddingLoading}
        llmFactory={'Google Cloud'}
        llmFactory={LLMFactory.GoogleCloud}
      ></GoogleModal>
      <TencentCloudModal
        visible={TencentCloudAddingVisible}
        hideModal={hideTencentCloudAddingModal}
        onOk={onTencentCloudAddingOk}
        loading={TencentCloudAddingLoading}
        llmFactory={'Tencent Cloud'}
        llmFactory={LLMFactory.TencentCloud}
      ></TencentCloudModal>
      <SparkModal
        visible={SparkAddingVisible}
        hideModal={hideSparkAddingModal}
        onOk={onSparkAddingOk}
        loading={SparkAddingLoading}
        llmFactory={'XunFei Spark'}
        llmFactory={LLMFactory.XunFeiSpark}
      ></SparkModal>
      <YiyanModal
        visible={yiyanAddingVisible}
        hideModal={hideyiyanAddingModal}
        onOk={onyiyanAddingOk}
        loading={yiyanAddingLoading}
        llmFactory={'BaiduYiyan'}
        llmFactory={LLMFactory.BaiduYiYan}
      ></YiyanModal>
      <FishAudioModal
        visible={FishAudioAddingVisible}
        hideModal={hideFishAudioAddingModal}
        onOk={onFishAudioAddingOk}
        loading={FishAudioAddingLoading}
        llmFactory={'Fish Audio'}
        llmFactory={LLMFactory.FishAudio}
      ></FishAudioModal>
      <BedrockModal
        visible={bedrockAddingVisible}
        hideModal={hideBedrockAddingModal}
        onOk={onBedrockAddingOk}
        loading={bedrockAddingLoading}
        llmFactory={'Bedrock'}
        llmFactory={LLMFactory.Bedrock}
      ></BedrockModal>
      <AzureOpenAIModal
        visible={AzureAddingVisible}
        hideModal={hideAzureAddingModal}
        onOk={onAzureAddingOk}
        loading={AzureAddingLoading}
        llmFactory={'Azure-OpenAI'}
        llmFactory={LLMFactory.AzureOpenAI}
      ></AzureOpenAIModal>
    </section>
  );

@@ -1,3 +1,4 @@
import { LLMFactory } from '@/constants/llm';
import { useTranslate } from '@/hooks/common-hooks';
import { IModalProps } from '@/interfaces/common';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';

@@ -18,19 +19,23 @@ type FieldType = IAddLlmRequestBody & { vision: boolean };
const { Option } = Select;

const llmFactoryToUrlMap = {
  Ollama:
  [LLMFactory.Ollama]:
    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/deploy_local_llm.mdx',
  Xinference: 'https://inference.readthedocs.io/en/latest/user_guide',
  ModelScope: 'https://www.modelscope.cn/docs/model-service/API-Inference/intro',
  LocalAI: 'https://localai.io/docs/getting-started/models/',
  'LM-Studio': 'https://lmstudio.ai/docs/basics',
  'OpenAI-API-Compatible': 'https://platform.openai.com/docs/models/gpt-4',
  TogetherAI: 'https://docs.together.ai/docs/deployment-options',
  Replicate: 'https://replicate.com/docs/topics/deployments',
  OpenRouter: 'https://openrouter.ai/docs',
  HuggingFace:
  [LLMFactory.Xinference]:
    'https://inference.readthedocs.io/en/latest/user_guide',
  [LLMFactory.ModelScope]:
    'https://www.modelscope.cn/docs/model-service/API-Inference/intro',
  [LLMFactory.LocalAI]: 'https://localai.io/docs/getting-started/models/',
  [LLMFactory.LMStudio]: 'https://lmstudio.ai/docs/basics',
  [LLMFactory.OpenAiAPICompatible]:
    'https://platform.openai.com/docs/models/gpt-4',
  [LLMFactory.TogetherAI]: 'https://docs.together.ai/docs/deployment-options',
  [LLMFactory.Replicate]: 'https://replicate.com/docs/topics/deployments',
  [LLMFactory.OpenRouter]: 'https://openrouter.ai/docs',
  [LLMFactory.HuggingFace]:
    'https://huggingface.co/docs/text-embeddings-inference/quick_tour',
  GPUStack: 'https://docs.gpustack.ai/latest/quickstart',
  [LLMFactory.GPUStack]: 'https://docs.gpustack.ai/latest/quickstart',
  [LLMFactory.VLLM]: 'https://docs.vllm.ai/en/latest/',
};
type LlmFactory = keyof typeof llmFactoryToUrlMap;
@@ -66,11 +71,11 @@ const OllamaModal = ({
    llmFactoryToUrlMap[llmFactory as LlmFactory] ||
    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/deploy_local_llm.mdx';
  const optionsMap = {
    HuggingFace: [
    [LLMFactory.HuggingFace]: [
      { value: 'embedding', label: 'embedding' },
      { value: 'chat', label: 'chat' },
    ],
    Xinference: [
    [LLMFactory.Xinference]: [
      { value: 'chat', label: 'chat' },
      { value: 'embedding', label: 'embedding' },
      { value: 'rerank', label: 'rerank' },
@@ -78,10 +83,8 @@ const OllamaModal = ({
      { value: 'speech2text', label: 'sequence2text' },
      { value: 'tts', label: 'tts' },
    ],
    ModelScope: [
      { value: 'chat', label: 'chat' },
    ],
    GPUStack: [
    [LLMFactory.ModelScope]: [{ value: 'chat', label: 'chat' }],
    [LLMFactory.GPUStack]: [
      { value: 'chat', label: 'chat' },
      { value: 'embedding', label: 'embedding' },
      { value: 'rerank', label: 'rerank' },
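`optionsMap` lists the model types the shared add-model dialog offers per factory, now keyed by the enum; this hunk does not show whether vLLM gets its own entry. A small sketch of how such a map might feed the model-type Select, where `ModelTypeSelect` and `defaultOptions` are illustrative assumptions rather than code from this PR:

```tsx
import { Select } from 'antd';

// Minimal sketch: pick the option list for the current factory, falling back to a
// default when it has no dedicated entry. The real OllamaModal defines its own list.
const defaultOptions = [
  { value: 'chat', label: 'chat' },
  { value: 'embedding', label: 'embedding' },
];

type OptionsMap = Record<string, { value: string; label: string }[]>;

export const ModelTypeSelect = ({
  llmFactory,
  optionsMap,
}: {
  llmFactory: string;
  optionsMap: OptionsMap;
}) => <Select options={optionsMap[llmFactory] ?? defaultOptions} />;
```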
@@ -1,3 +1,4 @@
import { LLMFactory } from '@/constants/llm';
import { IFactory } from '@/interfaces/database/llm';
import isObject from 'lodash/isObject';
import snakeCase from 'lodash/snakeCase';

@@ -36,12 +37,12 @@ export const formatNumberWithThousandsSeparator = (numberStr: string) => {
};

const orderFactoryList = [
  'OpenAI',
  'Moonshot',
  "PPIO",
  'ZHIPU-AI',
  'Ollama',
  'Xinference',
  LLMFactory.OpenAI,
  LLMFactory.Moonshot,
  LLMFactory.PPIO,
  LLMFactory.ZhipuAI,
  LLMFactory.Ollama,
  LLMFactory.Xinference,
];

export const sortLLmFactoryListBySpecifiedOrder = (list: IFactory[]) => {
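The body of `sortLLmFactoryListBySpecifiedOrder` sits outside this hunk; `orderFactoryList` only pins a handful of factories to the front of the provider list. A hedged sketch of what such an ordering helper typically does, assuming `IFactory` exposes a `name` field and relying on the imports shown above:

```ts
// Sketch only; the actual implementation in this utils file may differ.
const sortBySpecifiedOrder = (list: IFactory[]): IFactory[] => {
  const rank = (name: string) => {
    const index = orderFactoryList.findIndex((factory) => factory === name);
    return index === -1 ? orderFactoryList.length : index;
  };
  // Array.prototype.sort is stable, so unlisted factories keep their relative order.
  return [...list].sort((a, b) => rank(a.name) - rank(b.name));
};
```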