Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,9 @@ console.log(response.message.content)
```

### Browser Usage

To use the library without Node.js, import the browser module.

```javascript
import ollama from 'ollama/browser'
```
Expand Down
39 changes: 21 additions & 18 deletions src/browser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import type {
ShowResponse,
StatusResponse,
} from './interfaces.js'
import { EMPTY_STRING, MESSAGES, OLLAMA_LOCAL_URL, REQUEST_CONSTANTS } from './constants'

export class Ollama {
protected readonly config: Config
Expand All @@ -30,14 +31,15 @@ export class Ollama {

constructor(config?: Partial<Config>) {
this.config = {
host: '',
host: EMPTY_STRING,
}
if (!config?.proxy) {
this.config.host = utils.formatHost(config?.host ?? 'http://127.0.0.1:11434')
this.config.host = utils.formatHost(config?.host ?? OLLAMA_LOCAL_URL)
}

this.fetch = fetch
if (config?.fetch != null) {
// NOTE: fetch could either be undefined or an instance of Fetch
if (config?.fetch) {
this.fetch = config.fetch
}

Expand Down Expand Up @@ -76,7 +78,7 @@ export class Ollama {
)

if (!response.body) {
throw new Error('Missing body')
throw new Error(MESSAGES.MISSING_BODY)
}

const itr = utils.parseJSON<T | ErrorResponse>(response.body)
Expand All @@ -90,19 +92,18 @@ export class Ollama {
yield message
// message will be done in the case of chat and generate
// message will be success in the case of a progress response (pull, push, create)
if ((message as any).done || (message as any).status === 'success') {
if ((message as any).done || (message as any).status === MESSAGES.SUCCESS) {
return
}
}
throw new Error('Did not receive done or success response in stream.')
})()
} else {
const message = await itr.next()
if (!message.value.done && (message.value as any).status !== 'success') {
throw new Error('Expected a completed response.')
}
return message.value
}
const message = await itr.next()
if (!message.value.done && (message.value as any).status !== MESSAGES.SUCCESS) {
throw new Error('Expected a completed response.')
}
return message.value
}

/**
Expand Down Expand Up @@ -141,7 +142,10 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
if (request.images) {
request.images = await Promise.all(request.images.map(this.encodeImage.bind(this)))
}
return this.processStreamableRequest<GenerateResponse>('generate', request)
return this.processStreamableRequest<GenerateResponse>(
REQUEST_CONSTANTS.GENERATE,
request,
)
}

chat(request: ChatRequest & { stream: true }): Promise<AsyncGenerator<ChatResponse>>
Expand Down Expand Up @@ -179,11 +183,10 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
async create(
request: CreateRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
return this.processStreamableRequest<ProgressResponse>('create', {
return this.processStreamableRequest<ProgressResponse>(REQUEST_CONSTANTS.CREATE, {
name: request.model,
stream: request.stream,
modelfile: request.modelfile,
quantize: request.quantize,
})
}

Expand All @@ -199,7 +202,7 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
async pull(
request: PullRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
return this.processStreamableRequest<ProgressResponse>('pull', {
return this.processStreamableRequest<ProgressResponse>(REQUEST_CONSTANTS.PULL, {
name: request.model,
stream: request.stream,
insecure: request.insecure,
Expand All @@ -218,7 +221,7 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
async push(
request: PushRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
return this.processStreamableRequest<ProgressResponse>('push', {
return this.processStreamableRequest<ProgressResponse>(REQUEST_CONSTANTS.PUSH, {
name: request.model,
stream: request.stream,
insecure: request.insecure,
Expand All @@ -235,7 +238,7 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
await utils.del(this.fetch, `${this.config.host}/api/delete`, {
name: request.model,
})
return { status: 'success' }
return { status: MESSAGES.SUCCESS }
}

/**
Expand All @@ -246,7 +249,7 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
*/
async copy(request: CopyRequest): Promise<StatusResponse> {
await utils.post(this.fetch, `${this.config.host}/api/copy`, { ...request })
return { status: 'success' }
return { status: MESSAGES.SUCCESS }
}

/**
Expand Down
52 changes: 52 additions & 0 deletions src/constants/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
// Shared constants for the Ollama client.
// NOTE: string values here replace literals that were previously inlined in
// src/browser.ts (and friends); they MUST stay byte-identical to the wire
// protocol values the Ollama server emits.
const EMPTY_STRING = ''
const CODE_404 = '404'
// URL scheme names (no trailing '://').
const PROTOCOLS = {
  HTTP: 'http',
  HTTPS: 'https',
} as const
// Default ports implied by each scheme, as strings for URL comparison.
const PORTS = {
  HTTP: '80',
  HTTPS: '443',
} as const
const MESSAGES = {
  MISSING_BODY: 'Missing body',
  // Wire-protocol status value: the server reports `"status": "success"`
  // (lowercase) for completed pull/push/create progress responses, and the
  // client returns `{ status: 'success' }` from delete/copy. This must be
  // lowercase or streaming completion detection breaks.
  SUCCESS: 'success',
  FETCHING_TEXT: 'Getting text from response',
  ERROR_FETCHING_TEXT: 'Failed to get text from error response',
  ERROR_NO_MODEL_FILE: 'Must provide either path or modelfile to create a model',
  ERROR_JSON_PARSE: 'Failed to parse error response as JSON',
  STREAMING_UPLOADS_NOT_SUPPORTED:
    'Streaming uploads are not supported in this environment.',
} as const
// API endpoint path segments for streamable requests (`/api/<segment>`).
const REQUEST_CONSTANTS = {
  GENERATE: 'generate',
  CREATE: 'create',
  PUSH: 'push',
  PULL: 'pull',
} as const
// Node stream event names.
const STREAMING_EVENTS = {
  DATA: 'data',
  END: 'end',
  ERROR: 'error',
} as const
// Modelfile commands whose arguments may reference local blob paths.
const MODEL_FILE_COMMANDS = ['FROM', 'ADAPTER']
// Default host used when no host/proxy is configured.
const OLLAMA_LOCAL_URL = 'http://127.0.0.1:11434'
const SHA256 = 'sha256'
// Buffer/string encodings used when hashing and embedding file blobs.
const ENCODING = {
  HEX: 'hex',
  BASE64: 'base64',
  UTF8: 'utf8',
} as const
export {
  EMPTY_STRING,
  CODE_404,
  PROTOCOLS,
  PORTS,
  MESSAGES,
  REQUEST_CONSTANTS,
  STREAMING_EVENTS,
  MODEL_FILE_COMMANDS,
  OLLAMA_LOCAL_URL,
  SHA256,
  ENCODING,
}
Loading