Skip to content

Commit 36e1705

Browse files
authored
Merge pull request #238 from eljamm/llama-cpp
add: script llama-cpp-integration
2 parents becbcb5 + c638096 commit 36e1705

File tree

2 files changed

+56
-0
lines changed

2 files changed

+56
-0
lines changed

llama-cpp-integration/info.json

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
{
    "name": "llama-cpp integration",
    "identifier": "llama-cpp-integration",
    "script": "llama-cpp-integration.qml",
    "version": "0.0.1",
    "minAppVersion": "24.6.2",
    "authors": ["@pbek"],
    "description": "This script provides integration for a local <a href=\"https://github.com/ggerganov/llama.cpp\">llama-cpp</a> AI backend."
}
Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
import QtQml 2.0
import QOwnNotesTypes 1.0

/**
 * This script provides integration for a local llama-cpp backend
 * See: https://github.com/ggerganov/llama.cpp
 * List of models: https://github.com/ggerganov/llama.cpp#description
 * OpenAPI endpoint: https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md#result-json-1
 */
Script {
    // Chat-completions endpoint URL of the local llama-cpp server
    // (populated from the "baseUrl" script setting below)
    property string baseUrl;
    // Comma-separated list of model names to expose
    // (populated from the "models" script setting below)
    property string models;

    // register your settings variables so the user can set them in the script settings
    property variant settingsVariables: [
        {
            "identifier": "baseUrl",
            "name": "API base URL",
            "description": "The chat base URL of the llama-cpp API.",
            "type": "string",
            "default": "http://127.0.0.1:8080/v1/chat/completions",
        },
        {
            "identifier": "models",
            "name": "Models",
            "description": "Comma separated list of models to use.",
            "type": "string",
            "default": "llama3,gemma:2b",
        },
    ];

    /**
     * This function is called when the OpenAI service config is reloaded
     * It returns a list of objects with config parameters for new OpenAI backends
     *
     * @return {Array} list with one backend configuration object for llama-cpp
     */
    function openAiBackendsHook() {
        // Trim whitespace around each model name and drop empty entries, so
        // user input like "llama3, gemma:2b" or a trailing comma does not
        // produce padded or empty model names
        var modelList = models.split(",").map(function (model) {
            return model.trim();
        }).filter(function (model) {
            return model !== "";
        });

        return [
            {
                "id": "llama-cpp",
                "name": "llama-cpp",
                "baseUrl": baseUrl,
                "apiKey": "llama-cpp",
                "models": modelList
            },
        ];
    }
}

0 commit comments

Comments
 (0)