From 3dd7c1ac8018fe6070c4932a5c7eb335b70e21f6 Mon Sep 17 00:00:00 2001 From: takatost Date: Fri, 12 Jan 2024 12:21:33 +0800 Subject: [PATCH] optimize ollama docs --- en/advanced/model-configuration/ollama.md | 2 ++ zh_CN/guides/model-configuration/ollama.md | 2 ++ 2 files changed, 4 insertions(+) diff --git a/en/advanced/model-configuration/ollama.md b/en/advanced/model-configuration/ollama.md index 8f283a0..13af37c 100644 --- a/en/advanced/model-configuration/ollama.md +++ b/en/advanced/model-configuration/ollama.md @@ -34,7 +34,9 @@ Dify supports integrating LLM and Text Embedding capabilities of large language - Base URL: `http://:11434` Enter the base URL where the Ollama service is accessible. + If Dify is deployed using Docker, consider using the local network IP address, e.g., `http://192.168.1.100:11434`, or the Docker host machine IP address, e.g., `http://172.17.0.1:11434`. + For local source code deployment, use `http://localhost:11434`. - Model Type: Chat diff --git a/zh_CN/guides/model-configuration/ollama.md b/zh_CN/guides/model-configuration/ollama.md index 0c531e7..5209144 100644 --- a/zh_CN/guides/model-configuration/ollama.md +++ b/zh_CN/guides/model-configuration/ollama.md @@ -34,7 +34,9 @@ Dify 支持接入 Ollama 部署的大型语言模型推理和 embedding 能力 - 基础 URL：`http://:11434` 此处需填写可访问到的 Ollama 服务地址。 + 若 Dify 为 Docker 部署，建议填写局域网 IP 地址，如：`http://192.168.1.100:11434` 或 Docker 宿主机 IP 地址，如：`http://172.17.0.1:11434`。 + 若为本地源码部署，可填写 `http://localhost:11434`。 - 模型类型：对话