remove ollama
parent ffc2d4c13d
commit d184855eb2
1 changed file with 0 additions and 28 deletions
@@ -8,34 +8,6 @@ return {
     openai = {
       api_key_name = "cmd:cat ~/.openai",
     },
-    provider = "ollama",
-    vendors = {
-      ---@type AvanteProvider
-      ollama = {
-        ["local"] = true,
-        endpoint = "127.0.0.1:11434/v1",
-        model = "codegemma",
-        parse_curl_args = function(opts, code_opts)
-          return {
-            url = opts.endpoint .. "/chat/completions",
-            headers = {
-              ["Accept"] = "application/json",
-              ["Content-Type"] = "application/json",
-            },
-            body = {
-              model = opts.model,
-              messages = require("avante.providers").copilot.parse_message(code_opts), -- you can make your own message, but this is very advanced
-              max_tokens = 2048,
-              stream = true,
-            },
-          }
-        end,
-        parse_response_data = function(data_stream, event_state, opts)
-          require("avante.providers").openai.parse_response(data_stream, event_state, opts)
-        end,
-      },
-    },
-  },
   },
   -- if you want to build from source then do `make BUILD_FROM_SOURCE=true`
   build = "make",
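For reference, a minimal sketch of how the surrounding plugin spec presumably reads after this deletion, reconstructed only from the hunk's kept context lines. The plugin name, the enclosing opts table, and the exact nesting are assumptions for illustration and do not appear in this diff; with provider = "ollama" removed, avante falls back to whatever provider the rest of the config (or the plugin default) selects.

-- sketch of the post-commit spec; fields marked "assumed" are not shown in the hunk
return {
  "yetone/avante.nvim", -- assumed plugin name
  opts = {              -- assumed wrapper; the hunk context starts at the openai block
    openai = {
      -- read the API key from a command instead of storing it inline
      api_key_name = "cmd:cat ~/.openai",
    },
  },
  -- if you want to build from source then do `make BUILD_FROM_SOURCE=true`
  build = "make",
}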