diff --git a/README.md b/README.md
index 1d61efa..1cf2800 100644
--- a/README.md
+++ b/README.md
@@ -149,6 +149,11 @@ English/[简体中文](https://github.com/deepseek-ai/awesome-deepseek-integrati
avante.nvim |
 avante.nvim is an open-source autopilot in the IDE |
+
+ |
+ llm.nvim |
+   A free large language model (LLM) plugin that lets you interact with LLMs in Neovim. Supports any LLM, such as DeepSeek, GPT, GLM, Kimi, or locally run models (e.g., via Ollama). |
+
### JetBrains Extensions
diff --git a/README_cn.md b/README_cn.md
index 5212a0b..ba58984 100644
--- a/README_cn.md
+++ b/README_cn.md
@@ -142,6 +142,11 @@
avante.nvim |
 An open-source IDE plugin that uses an LLM as your coding assistant |
+
+ |
+ llm.nvim |
+   A free large language model (LLM) plugin that lets you interact with LLMs in Neovim. Supports any LLM, such as DeepSeek, GPT, GLM, Kimi, or locally run models (e.g., via Ollama). |
+
### JetBrains Extensions
diff --git a/docs/llm.nvim/README.md b/docs/llm.nvim/README.md
new file mode 100644
index 0000000..b50300b
--- /dev/null
+++ b/docs/llm.nvim/README.md
@@ -0,0 +1,78 @@
+> [!IMPORTANT]
+> A free large language model (LLM) plugin that lets you interact with LLMs in Neovim.
+>
+> The main features include, but are not limited to:
+>
+> - AI chat
+> - Word translation
+> - Code explanation
+> - Code optimization
+> - Test case generation
+> - AI translation
+> - Generation of git commit messages
+> - Generation of docstrings
+
+Link: [GitHub: Kurama622/llm.nvim](https://github.com/Kurama622/llm.nvim)
+
+# Install
+
+- lazy.nvim
+
+```lua
+return {
+  {
+    "Kurama622/llm.nvim",
+    dependencies = { "nvim-lua/plenary.nvim", "MunifTanjim/nui.nvim" },
+    cmd = { "LLMSessionToggle", "LLMSelectedTextHandler", "LLMAppHandler" },
+    config = function()
+      require("llm").setup({
+        url = "https://api.deepseek.com/chat/completions",
+        model = "deepseek-chat",
+        api_type = "openai",
+        max_tokens = 4096,
+        temperature = 0.3,
+        top_p = 0.7,
+
+        prompt = "You are a helpful Chinese assistant.",
+
+        prefix = {
+          user = { text = " ", hl = "Title" },
+          assistant = { text = " ", hl = "Added" },
+        },
+
+        -- history_path = "/tmp/llm-history",
+        save_session = true,
+        max_history = 15,
+        max_history_name_length = 20,
+
+        -- stylua: ignore
+        keys = {
+          -- The keyboard mapping for the input window.
+          ["Input:Submit"]      = { mode = "n", key = "<cr>" },
+          ["Input:Cancel"]      = { mode = { "n", "i" }, key = "<C-c>" },
+          ["Input:Resend"]      = { mode = { "n", "i" }, key = "<C-r>" },
+
+          -- only works when "save_session = true"
+          ["Input:HistoryNext"] = { mode = { "n", "i" }, key = "<C-j>" },
+          ["Input:HistoryPrev"] = { mode = { "n", "i" }, key = "<C-k>" },
+
+          -- The keyboard mapping for the output window in "split" style.
+          ["Output:Ask"]        = { mode = "n", key = "i" },
+          ["Output:Cancel"]     = { mode = "n", key = "<C-c>" },
+          ["Output:Resend"]     = { mode = "n", key = "<C-r>" },
+
+          -- The keyboard mapping for the output and input windows in "float" style.
+          ["Session:Toggle"]    = { mode = "n", key = "<leader>ac" },
+          ["Session:Close"]     = { mode = "n", key = { "<esc>", "Q" } },
+        },
+      })
+    end,
+    keys = {
+      { "<leader>ac", mode = "n", "<cmd>LLMSessionToggle<cr>" },
+      -- "请解释下面这段代码" = "Please explain the following code"
+      { "<leader>ae", mode = "v", "<cmd>LLMSelectedTextHandler 请解释下面这段代码<cr>" },
+      -- "英译汉" = "Translate English to Chinese"
+      { "<leader>ts", mode = "x", "<cmd>LLMSelectedTextHandler 英译汉<cr>" },
+    },
+  },
+}
+```
+
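+The configuration above does not show where the DeepSeek API key comes from. The sketch below is a minimal, hedged example of supplying the key without hard-coding it in your config; it assumes llm.nvim reads the key from an `LLM_KEY` environment variable and accepts a `fetch_key` callback in `setup()` (both are assumptions here, so check the plugin's README for the exact option names in your version). It is written as a standalone call for brevity; in the lazy.nvim spec above it would be merged into the existing `config` function.
+
+```lua
+-- Minimal sketch: supply the DeepSeek API key without hard-coding it.
+-- Assumptions: llm.nvim reads LLM_KEY from the environment and accepts a
+-- fetch_key callback in setup(); verify both against the plugin's README.
+vim.env.LLM_KEY = os.getenv("DEEPSEEK_API_KEY") or ""
+
+require("llm").setup({
+  url = "https://api.deepseek.com/chat/completions",
+  model = "deepseek-chat",
+  api_type = "openai",
+  fetch_key = function()
+    return vim.env.LLM_KEY -- read from the environment instead of a literal string
+  end,
+})
+```
+
+With this in place you can keep `export DEEPSEEK_API_KEY=...` in your shell profile instead of committing the key alongside your Neovim configuration.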
diff --git a/docs/llm.nvim/README_cn.md b/docs/llm.nvim/README_cn.md
new file mode 100644
index 0000000..f4beae8
--- /dev/null
+++ b/docs/llm.nvim/README_cn.md
@@ -0,0 +1,78 @@
+> [!IMPORTANT]
+> A free large language model (LLM) plugin that lets you interact with LLMs in Neovim.
+>
+> The main features include, but are not limited to:
+>
+> - AI chat
+> - Word translation
+> - Code explanation
+> - Code optimization
+> - Test case generation
+> - AI translation
+> - Generation of git commit messages
+> - Generation of docstrings
+
+Link: [GitHub: Kurama622/llm.nvim](https://github.com/Kurama622/llm.nvim)
+
+# Install
+
+- lazy.nvim
+
+```lua
+return {
+  {
+    "Kurama622/llm.nvim",
+    dependencies = { "nvim-lua/plenary.nvim", "MunifTanjim/nui.nvim" },
+    cmd = { "LLMSessionToggle", "LLMSelectedTextHandler", "LLMAppHandler" },
+    config = function()
+      require("llm").setup({
+        url = "https://api.deepseek.com/chat/completions",
+        model = "deepseek-chat",
+        api_type = "openai",
+        max_tokens = 4096,
+        temperature = 0.3,
+        top_p = 0.7,
+
+        prompt = "You are a helpful Chinese assistant.",
+
+        prefix = {
+          user = { text = " ", hl = "Title" },
+          assistant = { text = " ", hl = "Added" },
+        },
+
+        -- history_path = "/tmp/llm-history",
+        save_session = true,
+        max_history = 15,
+        max_history_name_length = 20,
+
+        -- stylua: ignore
+        keys = {
+          -- The keyboard mapping for the input window.
+          ["Input:Submit"]      = { mode = "n", key = "<cr>" },
+          ["Input:Cancel"]      = { mode = { "n", "i" }, key = "<C-c>" },
+          ["Input:Resend"]      = { mode = { "n", "i" }, key = "<C-r>" },
+
+          -- only works when "save_session = true"
+          ["Input:HistoryNext"] = { mode = { "n", "i" }, key = "<C-j>" },
+          ["Input:HistoryPrev"] = { mode = { "n", "i" }, key = "<C-k>" },
+
+          -- The keyboard mapping for the output window in "split" style.
+          ["Output:Ask"]        = { mode = "n", key = "i" },
+          ["Output:Cancel"]     = { mode = "n", key = "<C-c>" },
+          ["Output:Resend"]     = { mode = "n", key = "<C-r>" },
+
+          -- The keyboard mapping for the output and input windows in "float" style.
+          ["Session:Toggle"]    = { mode = "n", key = "<leader>ac" },
+          ["Session:Close"]     = { mode = "n", key = { "<esc>", "Q" } },
+        },
+      })
+    end,
+    keys = {
+      { "<leader>ac", mode = "n", "<cmd>LLMSessionToggle<cr>" },
+      -- "请解释下面这段代码" = "Please explain the following code"
+      { "<leader>ae", mode = "v", "<cmd>LLMSelectedTextHandler 请解释下面这段代码<cr>" },
+      -- "英译汉" = "Translate English to Chinese"
+      { "<leader>ts", mode = "x", "<cmd>LLMSelectedTextHandler 英译汉<cr>" },
+    },
+  },
+}
+```
+
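+As in the English version of this document, the configuration above does not show where the API key comes from. A minimal sketch, assuming llm.nvim reads the key from an `LLM_KEY` environment variable (an assumption; check the plugin's README for your version):
+
+```lua
+-- Minimal sketch (assumption: llm.nvim reads the key from LLM_KEY).
+-- Keep `export DEEPSEEK_API_KEY=...` in your shell profile and map it
+-- here before lazy.nvim loads the plugin.
+vim.env.LLM_KEY = os.getenv("DEEPSEEK_API_KEY") or ""
+```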