Test settings #124

base: main
@@ -0,0 +1,6 @@
/*
 * Copyright (c) Jupyter Development Team.
 * Distributed under the terms of the Modified BSD License.
 */

module.exports = require('@jupyterlab/testing/lib/babel-config');
@@ -0,0 +1,30 @@
/*
 * Copyright (c) Jupyter Development Team.
 * Distributed under the terms of the Modified BSD License.
 */

const jestJupyterLab = require('@jupyterlab/testing/lib/jest-config');

const esModules = [
  '@codemirror',
  '@jupyterlab/',
  'exenv-es6',
  'lib0',
  'nanoid',
  'vscode-ws-jsonrpc'
].join('|');

const baseConfig = jestJupyterLab(__dirname);

module.exports = {
  ...baseConfig,
  automock: false,
  collectCoverageFrom: [
    'src/**/*.{ts,tsx}',
    '!src/**/*.d.ts',
    '!src/**/.ipynb_checkpoints/*'
  ],
  coverageReporters: ['lcov', 'text'],
  testRegex: 'src/.*/.*.spec.ts[x]?$',
  transformIgnorePatterns: [`/node_modules/(?!${esModules}).+`]
};
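A note on the transformIgnorePatterns entry above (standard Jest behavior, not something specific to this PR): Jest leaves node_modules untransformed by default, so ESM-only packages such as lib0 or nanoid would fail to load in the CommonJS test environment. The negative lookahead opts the listed packages back into the Babel transform. A minimal sketch of the pattern the config builds, using a subset of the package names from above:

// Jest skips transforming node_modules by default; the negative lookahead below
// re-enables the transform for the ESM-only packages listed in esModules.
const esModules = ['@codemirror', 'lib0', 'nanoid'].join('|');
// Produces "/node_modules/(?!@codemirror|lib0|nanoid).+": paths matching it are
// left untransformed, and the listed packages deliberately do not match.
console.log(`/node_modules/(?!${esModules}).+`);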
@@ -0,0 +1,100 @@
import { compileSchema, SchemaNode } from 'json-schema-library';
import { ReadableStream } from 'web-streams-polyfill';
// @ts-expect-error
globalThis.ReadableStream = ReadableStream;
// if (typeof global.ReadableStream === undefined) {
//   global.ReadableStream = ReadableStream;
// }

import { ChatAnthropic } from '@langchain/anthropic';
// import { ChatWebLLM } from '@langchain/community/chat_models/webllm';
import { ChromeAI } from '@langchain/community/experimental/llms/chrome_ai';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
import { ChatMistralAI } from '@langchain/mistralai';
import { ChatOllama } from '@langchain/ollama';
import { ChatOpenAI } from '@langchain/openai';

// Import Settings
import AnthropicSettings from '../default-providers/Anthropic/settings-schema.json';
import ChromeAISettings from '../default-providers/ChromeAI/settings-schema.json';
import GeminiSettings from '../default-providers/Gemini/settings-schema.json';
import MistralAISettings from '../default-providers/MistralAI/settings-schema.json';
import OllamaAISettings from '../default-providers/Ollama/settings-schema.json';
import OpenAISettings from '../default-providers/OpenAI/settings-schema.json';
// import WebLLMSettings from '../default-providers/WebLLM/settings-schema.json';
import { IAIProvider, IType } from '../tokens';
import {
  BaseChatModel,
  BaseChatModelCallOptions
} from '@langchain/core/language_models/chat_models';
import { AIMessageChunk } from '@langchain/core/messages';

interface IAIProviderWithChat extends IAIProvider {
  chat: IType<BaseChatModel<BaseChatModelCallOptions, AIMessageChunk>>;
}
const AIProviders: IAIProviderWithChat[] = [
  {
    name: 'Anthropic',
    chat: ChatAnthropic,
    settingsSchema: AnthropicSettings
  },
  {
    name: 'ChromeAI',
    // TODO: fix
    // @ts-expect-error: missing properties
    chat: ChromeAI,
    settingsSchema: ChromeAISettings
  },
  {
    name: 'MistralAI',
    chat: ChatMistralAI,
    settingsSchema: MistralAISettings
  },
  {
    name: 'Ollama',
    chat: ChatOllama,
    settingsSchema: OllamaAISettings
  },
  {
    name: 'Gemini',
    chat: ChatGoogleGenerativeAI,
    settingsSchema: GeminiSettings
  },
  {
    name: 'OpenAI',
    chat: ChatOpenAI,
    settingsSchema: OpenAISettings
  }
  // {
  //   name: 'WebLLM',
  //   chat: ChatWebLLM,
  //   settingsSchema: WebLLMSettings
  // }
];

it('test provider settings', () => {
  AIProviders.forEach(provider => {
    console.log(`PROVIDER: ${provider.name}`);
    const schema: SchemaNode = compileSchema(provider.settingsSchema);
    const defaultSettings = schema.getData(undefined, {
      addOptionalProps: true
    });

    // Set a value for apiKey to avoid errors at instantiation.
    if (defaultSettings.apiKey !== undefined) {
      defaultSettings.apiKey = 'abc';
    }
    const model = new provider.chat(defaultSettings);

    Object.entries(defaultSettings).forEach(([key, value]) => {
      try {
        // @ts-expect-error
        expect(JSON.stringify(model[key])).toEqual(JSON.stringify(value));
      } catch (err) {
        // @ts-expect-error
        err.message = `${err.message}\nproperty: ${key}\n`;
        throw err; // throw the error so the test fails as expected
      }
    });
  });
});
@@ -2,10 +2,6 @@
   "$schema": "http://json-schema.org/draft-07/schema#",
   "type": "object",
   "properties": {
-    "concurrency": {
-      "type": "number",
-      "deprecated": "Use `maxConcurrency` instead"
-    },
     "topK": {
       "type": "number"
     },

Review comment on the removed "concurrency" property: Not sure about this one.
@@ -38,10 +38,6 @@
       "description": "Model name to use (e.g., gemini-pro, gemini-2.0-flash, etc.)",
       "default": "gemini-pro"
     },
-    "baseURL": {
-      "type": "string",
-      "description": "Base URL for the Google AI API"
-    },
     "safetySettings": {
       "type": "array",
       "description": "Safety settings for content filtering",

Review comment on the removed "baseURL" property: It doesn't set an attribute of the object, but may be useful https://github.com/langchain-ai/langchainjs/blob/c52cc0d53473e8507382c575cba5d46e107783ff/libs/langchain-google-genai/src/chat_models.ts#L707
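The comment above is the crux of why the spec cannot verify baseURL: an option that is only forwarded to the underlying client never appears as a same-named attribute on the model, so the JSON.stringify comparison in the test sees undefined. A minimal hypothetical sketch of that pattern (ExampleChatModel and its options are invented for illustration; this is not the ChatGoogleGenerativeAI implementation):

// Hypothetical model, for illustration only.
interface IExampleOptions {
  baseURL?: string;
  temperature?: number;
}

class ExampleChatModel {
  temperature: number;
  private client: { baseUrl: string };

  constructor(options: IExampleOptions) {
    // Mirrored as an instance attribute: the spec's comparison would pass for it.
    this.temperature = options.temperature ?? 0.7;
    // Only forwarded to the client: no `baseURL` attribute exists on the model itself.
    this.client = { baseUrl: options.baseURL ?? 'https://example.invalid' };
  }
}

const example = new ExampleChatModel({ baseURL: 'http://localhost:8080', temperature: 0.1 });
// (example as any).baseURL === undefined even though the option was consumed,
// which is exactly the case the spec above flags for schema entries like this one.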
@@ -93,12 +93,6 @@
       "type": ["string", "number"],
       "default": "5m"
     },
-    "stop": {
-      "type": "array",
-      "items": {
-        "type": "string"
-      }
-    },
     "disableStreaming": {
       "type": "boolean",
       "description": "Whether to disable streaming.\n\nIf streaming is bypassed, then `stream()` will defer to `invoke()`.\n\n- If true, will always bypass streaming case.\n- If false (default), will always use streaming case if available."

Review comment on the removed "stop" property: Should be nice to keep it.

@@ -113,11 +107,6 @@
       "description": "The host URL of the Ollama server.",
       "default": ""
     },
-    "headers": {
-      "type": "object",
-      "additionalProperties": false,
-      "description": "Optional HTTP Headers to include in the request."
-    },
     "checkOrPullModel": {
       "type": "boolean",
       "description": "Whether or not to check the model exists on the local machine before invoking it. If set to `true`, the model will be pulled if it does not exist.",
Review comment: Should probably be kept, the attribute doesn't have the same name https://github.com/langchain-ai/langchainjs/blob/c52cc0d53473e8507382c575cba5d46e107783ff/libs/langchain-anthropic/src/chat_models.ts#L701
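Following up on the comment above: when a schema key maps to a differently named attribute on the model (as with the linked Anthropic option), the spec's per-property comparison could consult an alias table instead of the key being dropped from the schema. A rough sketch that reuses the model and defaultSettings variables from the test above; the single alias shown is a hypothetical example, not a verified mapping:

// Hypothetical alias table: schema option name -> attribute name on the model.
const attributeAliases: Record<string, string> = {
  apiKey: 'anthropicApiKey' // illustrative only; check the provider's actual field names
};

Object.entries(defaultSettings).forEach(([key, value]) => {
  const attribute = attributeAliases[key] ?? key;
  // @ts-expect-error: dynamic property access on the model
  expect(JSON.stringify(model[attribute])).toEqual(JSON.stringify(value));
});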