From 0fa44c5dd3579232af9514d8fdcd0e06758616fd Mon Sep 17 00:00:00 2001
From: Tianyi Jing
Date: Mon, 17 Mar 2025 09:35:37 +0800
Subject: [PATCH] Fix: update link of deploy_local_llm.mdx (#6110)

### What problem does this PR solve?

The links to [How to integrate with Ollama](https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx) need to be updated after #5555, which moved the guide:

```
https://github.com/infiniflow/ragflow/blob/main/docs/guides/deploy_local_llm.mdx
-> https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx
```

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

Signed-off-by: jingfelix
---
 .../pages/user-setting/setting-model/ollama-modal/index.tsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/web/src/pages/user-setting/setting-model/ollama-modal/index.tsx b/web/src/pages/user-setting/setting-model/ollama-modal/index.tsx
index 1728be4d..8a271917 100644
--- a/web/src/pages/user-setting/setting-model/ollama-modal/index.tsx
+++ b/web/src/pages/user-setting/setting-model/ollama-modal/index.tsx
@@ -20,7 +20,7 @@ const { Option } = Select;
 
 const llmFactoryToUrlMap = {
   [LLMFactory.Ollama]:
-    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/deploy_local_llm.mdx',
+    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx',
   [LLMFactory.Xinference]:
     'https://inference.readthedocs.io/en/latest/user_guide',
   [LLMFactory.ModelScope]:
@@ -69,7 +69,7 @@ const OllamaModal = ({
   };
   const url =
     llmFactoryToUrlMap[llmFactory as LlmFactory] ||
-    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/deploy_local_llm.mdx';
+    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx';
   const optionsMap = {
     [LLMFactory.HuggingFace]: [
       { value: 'embedding', label: 'embedding' },
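
For context, a minimal standalone sketch of the lookup-with-fallback pattern the diff touches. This is not part of the patch: the enum, the map type, and the `resolveDocUrl` helper are illustrative assumptions; only the two documentation URLs come from the patched `index.tsx`.

```ts
// Illustrative sketch (assumed names, not the RAGFlow source).
enum LLMFactory {
  Ollama = 'Ollama',
  Xinference = 'Xinference',
}

// Map each known factory to its help link; the Ollama entry now points at
// the relocated guide under docs/guides/models/.
const llmFactoryToUrlMap: Partial<Record<LLMFactory, string>> = {
  [LLMFactory.Ollama]:
    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx',
  [LLMFactory.Xinference]:
    'https://inference.readthedocs.io/en/latest/user_guide',
};

// Factories without a dedicated entry fall back to the same relocated guide,
// which is why both occurrences of the URL in the diff had to change.
function resolveDocUrl(llmFactory: LLMFactory): string {
  return (
    llmFactoryToUrlMap[llmFactory] ||
    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx'
  );
}

console.log(resolveDocUrl(LLMFactory.Ollama));
```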