feat(nvim): add oatmeal plugin to use ollama directly in Neovim

This commit is contained in:
Stefan Imhoff
2024-01-01 14:13:43 +01:00
parent e844195053
commit 3b423caee8
5 changed files with 47 additions and 0 deletions

View File

@@ -27,6 +27,7 @@
~/.config/lazygit/config.yml: git/lazygit.yml
~/.config/lf: lf
~/.config/nvim: nvim
~/.config/oatmeal: oatmeal
~/.config/ranger: ranger
~/.config/skhd: skhd
~/.config/starship.toml: prompt/starship.toml

View File

@@ -477,6 +477,8 @@ brew "charmbracelet/tap/vhs"
brew "charmbracelet/tap/wishlist"
# CLI tool for CookLang Recipe Markup Language
brew "cooklang/tap/cook"
# Terminal UI to chat with large language models (LLM) using backends such as Ollama, and direct integrations with your favourite editor like Neovim!
brew "dustinblackman/tap/oatmeal"
# prettier, on SPEED!
brew "fsouza/prettierd/prettierd"
# macOS command line utility to configure multi-display resolutions and arrangements. Essentially XRandR for macOS.

View File

@@ -85,6 +85,7 @@
"nvim-ufo": { "branch": "main", "commit": "c6d88523f574024b788f1c3400c5d5b9bb1a0407" },
"nvim-various-textobjs": { "branch": "main", "commit": "8ebbedb58845d4bce91cfb1db705fb4e942ee333" },
"nvim-web-devicons": { "branch": "master", "commit": "43aa2ddf476012a2155f5f969ee55ab17174da7a" },
"oatmeal.nvim": { "branch": "master", "commit": "efeca77819c136a4a93c8913f8d3601daf76abb4" },
"obsidian.nvim": { "branch": "main", "commit": "4a962b100a77f852207e9f0b8bc8e3564997a05f" },
"octo.nvim": { "branch": "master", "commit": "b5371003f209764c9d1cc43cf20b6dc52961f0e8" },
"oil.nvim": { "branch": "master", "commit": "523b61430cb7365f8f86609c2ea60e48456bac63" },

View File

@@ -0,0 +1,11 @@
-- lazy.nvim plugin spec for oatmeal.nvim (terminal UI for chatting with LLMs).
return {
"dustinblackman/oatmeal.nvim",
-- Lazy-load: the plugin is only set up when the :Oatmeal command is first run.
cmd = { "Oatmeal" },
keys = {
-- NOTE(review): no rhs is given here, so in lazy.nvim convention this entry
-- only registers <leader>om as a lazy-load trigger plus a which-key description;
-- presumably the actual mapping is created by the plugin itself — verify.
{ "<leader>om", mode = "n", desc = "Start Oatmeal session" },
},
-- Passed to the plugin's setup(); selects the local Ollama backend and the
-- codellama:latest model (matches the oatmeal/config.toml added in this commit).
opts = {
backend = "ollama",
model = "codellama:latest",
},
}

32
oatmeal/config.toml Normal file
View File

@@ -0,0 +1,32 @@
# The initial backend hosting a model to connect to. [possible values: langchain, ollama, openai]
backend = "ollama"
# Time to wait in milliseconds before timing out when doing a healthcheck for a backend.
backend-health-check-timeout = 1000
# The editor to integrate with. [possible values: neovim, clipboard, none]
editor = "neovim"
# The initial model on a backend to consume. Defaults to the first model available from the backend if not set.
model = "codellama:latest"
# LangChain Serve API URL when using the LangChain backend.
lang-chain-url = "http://localhost:8000"
# Ollama API URL when using the Ollama backend.
ollama-url = "http://localhost:11434"
# OpenAI API token when using the OpenAI backend.
# open-ai-token = ""
# OpenAI API URL when using the OpenAI backend. Can be swapped to a compatible proxy.
open-ai-url = "https://api.openai.com"
# Sets code syntax highlighting theme. [possible values: base16-github, base16-monokai, base16-one-light, base16-onedark, base16-seti]
theme = "base16-onedark"
# Absolute path to a TextMate tmTheme to use for code syntax highlighting.
# theme-file = ""
# Your user name displayed in all chat bubbles.
# username = ""