Commit

Fix typo: disableGPU != disableGpu
This made the local inference try to use the GPU even when the user
provided the `--disableGpu` flag.

jehna committed Aug 24, 2024
1 parent 85d17e7 commit cfbf0d4
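
Why the flag was silently ignored: the options object produced by the CLI parser carries the camel-cased key `disableGpu` (from the `--disableGpu` flag), but the old code read `opts.disableGPU`. Reading a key that does not exist yields `undefined`, and `undefined ?? IS_CI` falls back to `IS_CI`, which is `false` outside CI, so `gpuLayers` stayed `undefined` and node-llama-cpp kept offloading layers to the GPU. A minimal sketch of the failure mode in TypeScript; the loosely typed shape of `opts` is an assumption, not taken from the repo:

// Sketch only: `opts` is assumed to mirror the parsed CLI flags.
const IS_CI = process.env["CI"] === "true"; // false on a developer machine

const opts: Record<string, boolean | undefined> = { disableGpu: true }; // parsed from --disableGpu

// Old code read the wrong key: undefined ?? false -> false -> gpuLayers stays undefined (GPU on)
const oldGpuLayers = (opts["disableGPU"] ?? IS_CI) ? 0 : undefined;

// Fixed code reads the key that actually exists: true -> gpuLayers: 0 (CPU only)
const newGpuLayers = (opts["disableGpu"] ?? IS_CI) ? 0 : undefined;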
Showing 2 changed files with 16 additions and 6 deletions.
4 changes: 3 additions & 1 deletion src/commands/local.ts
@@ -25,9 +25,11 @@ export const local = cli()
       verbose.enabled = true;
     }
 
+    verbose.log("Starting local inference with options: ", opts);
+
     const prompt = await llama({
       model: opts.model,
-      disableGPU: opts.disableGPU,
+      disableGpu: opts.disableGpu,
       seed: opts.seed ? parseInt(opts.seed) : undefined
     });
     await unminify(filename, opts.outputDir, [
18 changes: 13 additions & 5 deletions src/plugins/local-llm-rename/llama.ts
@@ -1,6 +1,12 @@
-import { getLlama, LlamaChatSession, LlamaGrammar } from "node-llama-cpp";
+import {
+  getLlama,
+  LlamaChatSession,
+  LlamaGrammar,
+  LlamaModelOptions
+} from "node-llama-cpp";
 import { Gbnf } from "./gbnf.js";
 import { getModelPath, getModelWrapper } from "../../local-models.js";
+import { verbose } from "../../verbose.js";
 
 export type Prompt = (
   systemPrompt: string,
@@ -13,13 +19,15 @@ const IS_CI = process.env["CI"] === "true";
 export async function llama(opts: {
   seed?: number;
   model: string;
-  disableGPU?: boolean;
+  disableGpu?: boolean;
 }): Promise<Prompt> {
   const llama = await getLlama();
-  const model = await llama.loadModel({
+  const modelOpts: LlamaModelOptions = {
     modelPath: getModelPath(opts?.model),
-    gpuLayers: (opts?.disableGPU ?? IS_CI) ? 0 : undefined
-  });
+    gpuLayers: (opts?.disableGpu ?? IS_CI) ? 0 : undefined
+  };
+  verbose.log("Loading model with options", modelOpts);
+  const model = await llama.loadModel(modelOpts);
 
   const context = await model.createContext({ seed: opts?.seed });
 
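With the property names aligned, the flag now actually reaches node-llama-cpp as `gpuLayers: 0`, which loads the model without offloading any layers to the GPU. A rough usage sketch of the fixed helper follows; the import path matches the changed file, but the call site, model name and seed are illustrative and not part of this commit:

import { llama } from "./plugins/local-llm-rename/llama.js";

// Hypothetical caller: the model name is a placeholder resolved via getModelPath().
const prompt = await llama({
  model: "some-local-model",
  disableGpu: true, // ends up as gpuLayers: 0 -> CPU-only inference
  seed: 42
});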
