@@ -9,16 +9,49 @@ import { ollamaCheckModel } from '../modules/ollamaCheckModel';
9
9
import { ollamaDownloadModel } from '../modules/ollamaDownloadModel' ;
10
10
import { config } from '../config' ;
11
11
12
/**
 * Status-bar state tracked by PromptProvider: a codicon name
 * (rendered as `$(icon)`) and the label shown next to it.
 */
type Status = {
    icon: string;
    text: string;
};
16
+
12
17
export class PromptProvider implements vscode . InlineCompletionItemProvider {
13
18
14
19
    // Serializes completion work so only one inference runs at a time.
    lock = new AsyncLock();
    // Status bar item this provider renders progress/state into.
    statusbar: vscode.StatusBarItem;
    context: vscode.ExtensionContext;
    // When true, completion requests are skipped (see the paused setter).
    private _paused: boolean = false;
    // Last icon/text rendered to the status bar; mutated by update().
    private _status: Status = { icon: "chip", text: "Llama Coder" };
17
24
18
25
constructor ( statusbar : vscode . StatusBarItem , context : vscode . ExtensionContext ) {
19
26
this . statusbar = statusbar ;
20
27
this . context = context ;
21
28
}
29
+
30
    /**
     * Pauses or resumes inline completions, then re-renders the status bar
     * so the paused indicator appears immediately.
     */
    public set paused(value: boolean) {
        this._paused = value;
        this.update();
    }
34
+
35
    /** Whether completion requests are currently being skipped. */
    public get paused(): boolean {
        return this._paused;
    }
38
+
39
+ private update ( icon ?: string , text ?: string ) : void {
40
+ this . _status . icon = icon ? icon : this . _status . icon ;
41
+ this . _status . text = text ? text : this . _status . text ;
42
+
43
+ let statusText = '' ;
44
+ let statusTooltip = '' ;
45
+ if ( this . _paused ) {
46
+ statusText = `$(sync-ignored) ${ this . _status . text } ` ;
47
+ statusTooltip = `${ this . _status . text } (Paused)` ;
48
+ } else {
49
+ statusText = `$(${ this . _status . icon } ) ${ this . _status . text } ` ;
50
+ statusTooltip = `${ this . _status . text } ` ;
51
+ }
52
+ this . statusbar . text = statusText ;
53
+ this . statusbar . tooltip = statusTooltip ;
54
+ }
22
55
23
56
async delayCompletion ( delay : number , token : vscode . CancellationToken ) : Promise < boolean > {
24
57
if ( config . inference . delay < 0 ) {
@@ -37,6 +70,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
37
70
}
38
71
39
72
try {
73
+ if ( this . paused ) {
74
+ return ;
75
+ }
40
76
41
77
// Ignore unsupported documents
42
78
if ( ! isSupported ( document ) ) {
@@ -82,7 +118,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
82
118
let inferenceConfig = config . inference ;
83
119
84
120
// Update status
85
- this . statusbar . text = `$( sync~spin) Llama Coder` ;
121
+ this . update ( ' sync~spin' , ' Llama Coder' ) ;
86
122
try {
87
123
88
124
// Check model exists
@@ -110,9 +146,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
110
146
}
111
147
112
148
// Perform download
113
- this . statusbar . text = `$( sync~spin) Downloading` ;
149
+ this . update ( ' sync~spin' , ' Downloading' ) ;
114
150
await ollamaDownloadModel ( inferenceConfig . endpoint , inferenceConfig . modelName ) ;
115
- this . statusbar . text = `$( sync~spin) Llama Coder` ;
151
+ this . update ( ' sync~spin' , ' Llama Coder' )
116
152
}
117
153
if ( token . isCancellationRequested ) {
118
154
info ( `Canceled after AI completion.` ) ;
@@ -141,7 +177,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
141
177
value : res
142
178
} ) ;
143
179
} finally {
144
- this . statusbar . text = `$( chip) Llama Coder` ;
180
+ this . update ( ' chip' , ' Llama Coder' ) ;
145
181
}
146
182
} else {
147
183
if ( cached !== null ) {
0 commit comments