Merge pull request #70 from neatwork-ai/rename-extension
Renamed DisplayName
kaizu-xyz authored Nov 8, 2023
2 parents df74da2 + 5166dff commit 34fedd0
Showing 11 changed files with 205 additions and 6 deletions.
2 changes: 2 additions & 0 deletions crates/neatcoder/src/consts.rs
@@ -15,3 +15,5 @@ pub const CONFIG_FILES: [&str; 9] = [
"Package.swift", // Swift
".gitignore",
];

pub const BASE_BETA_URL: &str = "https://api.openai.com/v1/beta";
123 changes: 123 additions & 0 deletions crates/neatcoder/src/openai/assistant/assistant.rs
@@ -0,0 +1,123 @@
use anyhow::{anyhow, Result};
use reqwest::{header::HeaderMap, Client};
use serde::{
    de::{self, Visitor},
    Deserialize, Deserializer, Serialize,
};
use serde_json::json;
use std::{collections::HashMap, fmt};

use crate::{consts::BASE_BETA_URL, openai::params::OpenAIModels};

#[derive(Serialize, Debug)]
pub struct AssistantRequest {
    pub name: String,
    pub instructions: String,
    pub tools: Vec<String>, // TODO: the API expects objects, i.e. "tools": [{"type": "code_interpreter"}]
    pub model: OpenAIModels,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Assistant {
    id: String,
    object: String,
    created_at: u32, // TODO: Should be a timestamp
    name: String,
    description: Option<String>,
    model: OpenAIModels,
    instructions: Option<String>,
    tools: Vec<Tool>,
    file_ids: Vec<String>,
    metadata: HashMap<String, String>,
}

#[derive(Debug, Serialize)]
pub enum Tool {
    CodeInterpreter,
    Retrieval,
    FunctionCall,
}

impl Tool {
    /// Panics if the tool name is not one of the supported variants.
    pub fn new(tool: String) -> Self {
        match tool.as_str() {
            "code_interpreter" => Tool::CodeInterpreter,
            "retrieval" => Tool::Retrieval,
            "function" => Tool::FunctionCall,
            _ => panic!("Invalid tool {}", tool),
        }
    }

    pub fn as_string(&self) -> String {
        match self {
            Tool::CodeInterpreter => String::from("code_interpreter"),
            Tool::Retrieval => String::from("retrieval"),
            Tool::FunctionCall => String::from("function"),
        }
    }
}

impl<'de> Deserialize<'de> for Tool {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ToolVisitor;

        impl<'de> Visitor<'de> for ToolVisitor {
            type Value = Tool;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a string representing an OpenAI tool")
            }

            fn visit_str<E>(self, value: &str) -> Result<Tool, E>
            where
                E: de::Error,
            {
                match value {
                    "code_interpreter" => Ok(Tool::CodeInterpreter),
                    "retrieval" => Ok(Tool::Retrieval),
                    "function" => Ok(Tool::FunctionCall),
                    _ => Err(E::custom(format!(
                        "unexpected OpenAI tool: {}",
                        value
                    ))),
                }
            }
        }

        deserializer.deserialize_str(ToolVisitor)
    }
}
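// --- Editor's illustration (not part of this commit) ---
// With the visitor above, `Tool` values parse directly from the plain strings
// returned by the OpenAI API, and unknown names are rejected with the custom
// error. A minimal sketch:
#[cfg(test)]
mod tool_deserialize_sketch {
    use super::*;

    #[test]
    fn parses_known_tools_and_rejects_unknown_ones() {
        let tool: Tool = serde_json::from_str("\"code_interpreter\"").unwrap();
        assert_eq!(tool.as_string(), "code_interpreter");

        assert!(serde_json::from_str::<Tool>("\"web_search\"").is_err());
    }
}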

impl AssistantRequest {
    pub async fn create_assistant(
        self,
        client: &Client,
        headers: &HeaderMap,
    ) -> Result<Assistant> {
        let response = client
            .post(&format!("{}/assistants", BASE_BETA_URL))
            .headers(headers.clone())
            .json(&json!({
                "name": self.name, // e.g. "Math Tutor"
                "instructions": self.instructions, // e.g. "You are a personal math tutor. Write and run code to answer math questions."
                "tools": self.tools, // e.g. [{"type": "code_interpreter"}]
                "model": self.model, // e.g. "gpt-4-1106-preview"
            }))
            .send()
            .await?;

        if response.status().is_success() {
            let assistant = response.json::<Assistant>().await?;
            println!("Create Assistant response: {:?}", assistant);
            Ok(assistant)
        } else {
            // On failure, surface the HTTP status as the error.
            Err(anyhow!(response.status()))
        }
    }
}
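Editor's note: the sketch below is not part of this commit; it only illustrates how the new AssistantRequest::create_assistant method might be wired up by a caller. The struct fields, the method signature, and the OpenAIModels::Gpt41106Preview variant come from this diff; the helper name, the header set, and in particular the OpenAI-Beta header are assumptions.

// Hypothetical helper, for illustration only.
use anyhow::Result;
use reqwest::{
    header::{HeaderMap, HeaderValue, AUTHORIZATION, CONTENT_TYPE},
    Client,
};

use crate::openai::{
    assistant::assistant::{Assistant, AssistantRequest},
    params::OpenAIModels,
};

async fn create_math_tutor(api_key: &str) -> Result<Assistant> {
    let client = Client::new();

    let mut headers = HeaderMap::new();
    headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
    headers.insert(
        AUTHORIZATION,
        HeaderValue::from_str(&format!("Bearer {}", api_key))?,
    );
    // Assumption: the Assistants beta endpoints also expect this header.
    headers.insert("OpenAI-Beta", HeaderValue::from_static("assistants=v1"));

    let request = AssistantRequest {
        name: "Math Tutor".to_string(),
        instructions: "You are a personal math tutor.".to_string(),
        // Per the TODO on `AssistantRequest`, the API expects objects such as
        // {"type": "code_interpreter"}, so plain strings may need re-mapping.
        tools: vec!["code_interpreter".to_string()],
        model: OpenAIModels::Gpt41106Preview,
    };

    request.create_assistant(&client, &headers).await
}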
Empty file.
1 change: 1 addition & 0 deletions crates/neatcoder/src/openai/assistant/mod.rs
@@ -0,0 +1 @@
pub mod assistant;
Empty file.
1 change: 1 addition & 0 deletions crates/neatcoder/src/openai/mod.rs
@@ -1,3 +1,4 @@
pub mod assistant;
///< Client for interacting with the OpenAI API.
pub mod msg;
pub mod params;
44 changes: 42 additions & 2 deletions crates/neatcoder/src/openai/params.rs
@@ -1,6 +1,9 @@
use anyhow::Result;
use serde::Serialize;
use std::collections::HashMap;
use serde::{
    de::{self, Visitor},
    Deserialize, Deserializer, Serialize,
};
use std::{collections::HashMap, fmt};
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};

use crate::{
@@ -245,6 +248,43 @@ impl OpenAIModels {
}
}

impl<'de> Deserialize<'de> for OpenAIModels {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct OpenAIModelsVisitor;

        impl<'de> Visitor<'de> for OpenAIModelsVisitor {
            type Value = OpenAIModels;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a string representing an OpenAI model")
            }

            fn visit_str<E>(self, value: &str) -> Result<OpenAIModels, E>
            where
                E: de::Error,
            {
                match value {
                    "gpt-4-32k" => Ok(OpenAIModels::Gpt432k),
                    "gpt-4" => Ok(OpenAIModels::Gpt4),
                    "gpt-3.5-turbo" => Ok(OpenAIModels::Gpt35Turbo),
                    "gpt-3.5-turbo-16k" => Ok(OpenAIModels::Gpt35Turbo16k),
                    "gpt-3.5-turbo-1106" => Ok(OpenAIModels::Gpt35Turbo1106),
                    "gpt-4-1106-preview" => Ok(OpenAIModels::Gpt41106Preview),
                    _ => Err(E::custom(format!(
                        "unexpected OpenAI model: {}",
                        value
                    ))),
                }
            }
        }

        deserializer.deserialize_str(OpenAIModelsVisitor)
    }
}

impl Default for OpenAIParams {
    fn default() -> Self {
        Self {
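Editor's note: a minimal sketch, not part of this commit, of what the new Deserialize impl enables — parsing model identifiers returned by the API directly into OpenAIModels. The variant names come from this diff; the function name is hypothetical.

// Hypothetical example, for illustration only.
fn parse_model_example() -> anyhow::Result<()> {
    // JSON strings map onto the enum via the visitor above.
    let model: OpenAIModels = serde_json::from_str("\"gpt-4-1106-preview\"")?;
    assert!(matches!(model, OpenAIModels::Gpt41106Preview));

    // Names outside the supported set fail with the custom error message.
    assert!(serde_json::from_str::<OpenAIModels>("\"not-a-model\"").is_err());
    Ok(())
}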
8 changes: 8 additions & 0 deletions vsce/CHANGELOG.md
@@ -4,6 +4,14 @@ All notable changes to the "neatcoder" extension will be documented in this file

Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file.

## [0.2.4] - 08/11/2023

### Changed
- Reinstated the classic OpenAI models `gpt-3.5` and `gpt-4`

### Fixed
- Error handling for chat HTTP requests

## [0.2.3] - 07/11/2023

### Added
4 changes: 2 additions & 2 deletions vsce/package.json
@@ -1,8 +1,8 @@
{
  "name": "neatcoder",
  "displayName": "Neatwork AI - GPT4 on Steroids",
  "displayName": "Neatwork AI - GPT4 Turbo on Steroids",
  "description": "Turn your IDE into an AI Software engineer.",
  "version": "0.2.3",
  "version": "0.2.4",
  "publisher": "NeatworkAi",
  "repository": {
    "url": "https://github.com/neatwork-ai/neatcoder-issues.git",
18 changes: 18 additions & 0 deletions vsce/src/chat/handlers.ts
@@ -1,7 +1,9 @@
import * as vscode from "vscode";
import { window } from "vscode";
import { getOrSetApiKey } from "../utils";
import * as wasm from "../../pkg/neatcoder";
import * as https from "https";
import * as http from "http";
import * as url from "url";
import { MessageBuffer } from "../utils/httpClient";
import { getLLMParams } from "../utils/utils";
@@ -55,6 +57,22 @@ export async function promptLLM(

  const req = https.request(options, async (res) => {
    console.log(`STATUS: ${res.statusCode}`);
    if (res.statusCode !== 202) {
      const statusMessage =
        http.STATUS_CODES[res.statusCode!] || "Unknown status code";

      console.log(`STATUS: ${res.statusCode} ${statusMessage}`);
      // `window` comes from the VS Code extension API, so the failure is
      // surfaced directly in the editor.
      window.showErrorMessage(
        `HTTP error: STATUS: ${res.statusCode} ${statusMessage}`
      );

      reject(
        new Error(`HTTP error: STATUS: ${res.statusCode} ${statusMessage}`)
      );
      return; // Stop further processing
    }

    res.setEncoding("utf8");
    res.pause();

10 changes: 8 additions & 2 deletions vsce/src/utils/utils.ts
@@ -280,7 +280,12 @@ export async function getOrSetModelVersion(): Promise<wasm.OpenAIModels | null>

  if (!modelVersion) {
    const value = await vscode.window.showQuickPick(
      ["gpt-3.5-turbo-1106", "gpt-4-1106-preview"],
      [
        "gpt-3.5-turbo-1106",
        "gpt-4-1106-preview",
        "gpt-3.5-turbo-16k",
        "gpt-4",
      ],
      {
        canPickMany: false,
        placeHolder: "Select an OpenAI model", // This is the placeholder text
@@ -306,11 +311,12 @@ export async function getOrSetModelVersion(): Promise<wasm.OpenAIModels | null>
  return fromModelVersionToEnum(modelVersion);
}

// TODO: Remove duplicated logic...
export async function setModelVersion() {
  let config = vscode.workspace.getConfiguration("extension");

  const value = await vscode.window.showQuickPick(
    ["gpt-3.5-turbo-1106", "gpt-4-1106-preview"],
    ["gpt-3.5-turbo-1106", "gpt-4-1106-preview", "gpt-3.5-turbo-16k", "gpt-4"],
    {
      canPickMany: false,
      placeHolder: "Select an OpenAI model", // This is the placeholder text
