Add commands to pause, resume, and toggle inferencing
bkyle committed Feb 22, 2024
1 parent 996ac71 commit 58b884a
3 changed files with 72 additions and 7 deletions.
package.json (18 additions, 0 deletions)
@@ -33,6 +33,24 @@
   "extensionKind": ["ui"],
   "main": "./out/extension.js",
   "contributes": {
+    "commands": [
+      {
+        "command": "llama.openSettings",
+        "title": "Llama Coder: Open Settings"
+      },
+      {
+        "command": "llama.pause",
+        "title": "Llama Coder: Pause"
+      },
+      {
+        "command": "llama.resume",
+        "title": "Llama Coder: Resume"
+      },
+      {
+        "command": "llama.toggle",
+        "title": "Llama Coder: Toggle"
+      }
+    ],
     "configuration": [
       {
         "title": "Llama coder",
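The commands block above registers the four command IDs with VS Code so they show up in the Command Palette under the "Llama Coder:" prefix. As a rough usage sketch (not part of this commit), other extension code or an integration test could invoke the same IDs programmatically:

    // Hypothetical sketch: triggering the contributed commands from extension or test code.
    import * as vscode from 'vscode';

    async function demoPauseResumeToggle(): Promise<void> {
        await vscode.commands.executeCommand('llama.pause');   // suspend inline completions
        await vscode.commands.executeCommand('llama.resume');  // re-enable inline completions
        await vscode.commands.executeCommand('llama.toggle');  // flip the current paused state
    }
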
src/extension.ts (14 additions, 3 deletions)
@@ -9,12 +9,12 @@ export function activate(context: vscode.ExtensionContext) {
     info('Llama Coder is activated.');
 
     // Create status bar
-    const openSettings = 'llama.openSettings';
-    context.subscriptions.push(vscode.commands.registerCommand(openSettings, () => {
+    context.subscriptions.push(vscode.commands.registerCommand('llama.openSettings', () => {
         vscode.commands.executeCommand('workbench.action.openSettings', '@ext:ex3ndr.llama-coder');
     }));
 
     let statusBarItem = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Right, 100);
-    statusBarItem.command = openSettings;
+    statusBarItem.command = 'llama.toggle';
     statusBarItem.text = `$(chip) Llama Coder`;
     statusBarItem.show();
     context.subscriptions.push(statusBarItem);
@@ -23,6 +23,17 @@ export function activate(context: vscode.ExtensionContext) {
     const provider = new PromptProvider(statusBarItem, context);
     let disposable = vscode.languages.registerInlineCompletionItemProvider({ pattern: '**', }, provider);
     context.subscriptions.push(disposable);
+
+    context.subscriptions.push(vscode.commands.registerCommand('llama.pause', () => {
+        provider.paused = true;
+    }));
+    context.subscriptions.push(vscode.commands.registerCommand('llama.resume', () => {
+        provider.paused = false;
+    }));
+    context.subscriptions.push(vscode.commands.registerCommand('llama.toggle', () => {
+        provider.paused = !provider.paused;
+    }));
+
 }
 
 export function deactivate() {
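Each registered command simply writes to provider.paused, and the status bar item now runs llama.toggle when clicked instead of opening the settings page. A minimal sketch (a hypothetical test helper, not part of the commit) of the expected toggle behavior, assuming access to the PromptProvider instance:

    // Hypothetical check: toggling twice should leave the paused state unchanged.
    import * as assert from 'assert';
    import * as vscode from 'vscode';

    async function checkToggleRoundTrip(provider: { paused: boolean }): Promise<void> {
        const before = provider.paused;
        await vscode.commands.executeCommand('llama.toggle');
        assert.strictEqual(provider.paused, !before);
        await vscode.commands.executeCommand('llama.toggle');
        assert.strictEqual(provider.paused, before);
    }
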
src/prompts/provider.ts (40 additions, 4 deletions)
@@ -9,16 +9,49 @@ import { ollamaCheckModel } from '../modules/ollamaCheckModel';
 import { ollamaDownloadModel } from '../modules/ollamaDownloadModel';
 import { config } from '../config';
 
+type Status = {
+    icon: string;
+    text: string;
+};
+
 export class PromptProvider implements vscode.InlineCompletionItemProvider {
 
     lock = new AsyncLock();
     statusbar: vscode.StatusBarItem;
     context: vscode.ExtensionContext;
+    private _paused: boolean = false;
+    private _status: Status = { icon: "chip", text: "Llama Coder" };
 
     constructor(statusbar: vscode.StatusBarItem, context: vscode.ExtensionContext) {
         this.statusbar = statusbar;
         this.context = context;
     }
 
+    public set paused(value: boolean) {
+        this._paused = value;
+        this.update();
+    }
+
+    public get paused(): boolean {
+        return this._paused;
+    }
+
+    private update(icon?: string, text?: string): void {
+        this._status.icon = icon ? icon : this._status.icon;
+        this._status.text = text ? text : this._status.text;
+
+        let statusText = '';
+        let statusTooltip = '';
+        if (this._paused) {
+            statusText = `$(sync-ignored) ${this._status.text}`;
+            statusTooltip = `${this._status.text} (Paused)`;
+        } else {
+            statusText = `$(${this._status.icon}) ${this._status.text}`;
+            statusTooltip = `${this._status.text}`;
+        }
+        this.statusbar.text = statusText;
+        this.statusbar.tooltip = statusTooltip;
+    }
+
     async delayCompletion(delay: number, token: vscode.CancellationToken): Promise<boolean> {
         if (config.inference.delay < 0) {
@@ -37,6 +70,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
         }
 
         try {
+            if (this.paused) {
+                return;
+            }
 
             // Ignore unsupported documents
             if (!isSupported(document)) {
@@ -82,7 +118,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
             let inferenceConfig = config.inference;
 
             // Update status
-            this.statusbar.text = `$(sync~spin) Llama Coder`;
+            this.update('sync~spin', 'Llama Coder');
             try {
 
                 // Check model exists
@@ -110,9 +146,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                 }
 
                 // Perform download
-                this.statusbar.text = `$(sync~spin) Downloading`;
+                this.update('sync~spin', 'Downloading');
                 await ollamaDownloadModel(inferenceConfig.endpoint, inferenceConfig.modelName);
-                this.statusbar.text = `$(sync~spin) Llama Coder`;
+                this.update('sync~spin', 'Llama Coder');
             }
             if (token.isCancellationRequested) {
                 info(`Canceled after AI completion.`);
@@ -141,7 +177,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                     value: res
                 });
             } finally {
-                this.statusbar.text = `$(chip) Llama Coder`;
+                this.update('chip', 'Llama Coder');
             }
         } else {
             if (cached !== null) {
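The paused setter funnels every state change through update(), which also replaces the direct statusbar.text writes in the completion path. An illustrative sketch (not part of the commit, using a plain stand-in object instead of vscode.StatusBarItem) of what the two states render to:

    // Mirrors the update() logic above with a stand-in for the real status bar item.
    const fakeStatusBar = { text: '', tooltip: '' };

    function render(paused: boolean, icon: string, text: string): void {
        fakeStatusBar.text = paused ? `$(sync-ignored) ${text}` : `$(${icon}) ${text}`;
        fakeStatusBar.tooltip = paused ? `${text} (Paused)` : text;
    }

    render(false, 'chip', 'Llama Coder'); // text: "$(chip) Llama Coder", tooltip: "Llama Coder"
    render(true, 'chip', 'Llama Coder');  // text: "$(sync-ignored) Llama Coder", tooltip: "Llama Coder (Paused)"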
