Skip to content

Commit 38a77bb

Browse files
committed
feat: stable-code support, cancel download only for specific model
1 parent 241d47b commit 38a77bb

File tree

4 files changed

+19
-6
lines changed

4 files changed

+19
-6
lines changed

package.json

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@
4545
"inference.model": {
4646
"type": "string",
4747
"enum": [
48+
"stable-code:3b-code-q4_0",
4849
"codellama:7b-code-q4_K_S",
4950
"codellama:7b-code-q4_K_M",
5051
"codellama:7b-code-q6_K",
@@ -70,7 +71,7 @@
7071
"deepseek-coder:33b-base-fp16",
7172
"custom"
7273
],
73-
"default": "deepseek-coder:1.3b-base-q4_1",
74+
"default": "stable-code:3b-code-q4_0",
7475
"description": "Inference model to use",
7576
"order": 2
7677
},
@@ -89,6 +90,7 @@
8990
"inference.custom.format": {
9091
"type": "string",
9192
"enum": [
93+
"stable-code",
9294
"codellama",
9395
"deepseek"
9496
],

src/config.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import vscode from 'vscode';
2+
import { ModelFormat } from './prompts/processors/models';
23

34
class Config {
45

@@ -22,13 +23,15 @@ class Config {
2223

2324
// Load model
2425
let modelName = config.get('model') as string;
25-
let modelFormat: 'codellama' | 'deepseek' = 'codellama';
26+
let modelFormat: ModelFormat = 'codellama';
2627
if (modelName === 'custom') {
2728
modelName = config.get('custom.model') as string;
28-
modelFormat = config.get('cutom.format') as 'codellama' | 'deepseek';
29+
modelFormat = config.get('custom.format') as ModelFormat;
2930
} else {
3031
if (modelName.startsWith('deepseek-coder')) {
3132
modelFormat = 'deepseek';
33+
} else if (modelName.startsWith('stable-code')) {
34+
modelFormat = 'stable-code';
3235
}
3336
}
3437

src/prompts/processors/models.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
export type ModelFormat = 'codellama' | 'deepseek';
1+
export type ModelFormat = 'codellama' | 'deepseek' | 'stable-code';
22

33
export function adaptPrompt(args: { format: ModelFormat, prefix: string, suffix: string }): { prompt: string, stop: string[] } {
44

@@ -18,6 +18,14 @@ export function adaptPrompt(args: { format: ModelFormat, prefix: string, suffix:
1818
};
1919
}
2020

21+
// Stable code FIM
22+
if (args.format === 'stable-code') {
23+
return {
24+
prompt: `<fim_prefix>${args.prefix}<fim_suffix>${args.suffix}<fim_middle>`,
25+
stop: [`<|endoftext|>`]
26+
};
27+
}
28+
2129
// Codellama FIM
2230
return {
2331
prompt: `<PRE> ${args.prefix} <SUF>${args.suffix} <MID>`,

src/prompts/provider.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
9191
if (!modelExists) {
9292

9393
// Check if user asked to ignore download
94-
if (this.context.globalState.get('llama-coder-download-ignored')) {
94+
if (this.context.globalState.get('llama-coder-download-ignored') === inferenceConfig.modelName) {
9595
info(`Ignoring since user asked to ignore download.`);
9696
return;
9797
}
@@ -100,7 +100,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
100100
let download = await vscode.window.showInformationMessage(`Model ${inferenceConfig.modelName} is not downloaded. Do you want to download it? Answering "No" would require you to manually download model.`, 'Yes', 'No');
101101
if (download === 'No') {
102102
info(`Ignoring since user asked to ignore download.`);
103-
this.context.globalState.update('llama-coder-download-ignored', true);
103+
this.context.globalState.update('llama-coder-download-ignored', inferenceConfig.modelName);
104104
return;
105105
}
106106

0 commit comments

Comments
 (0)