Skip to content

Commit

Permalink
Merge branch 'main' into docs
Browse files Browse the repository at this point in the history
  • Loading branch information
jaredpalmer committed May 24, 2023
2 parents ba99cbc + e7bc7ad commit ccea4b8
Show file tree
Hide file tree
Showing 44 changed files with 738 additions and 1,763 deletions.
2 changes: 0 additions & 2 deletions .eslintignore

This file was deleted.

55 changes: 0 additions & 55 deletions .eslintrc

This file was deleted.

10 changes: 10 additions & 0 deletions .eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
// Workspace-root ESLint configuration for the monorepo.
module.exports = {
  // Stop ESLint from searching parent directories for further configs.
  root: true,
  // This tells ESLint to load the config from the package `eslint-config-vercel-ai`
  extends: ['vercel-ai'],
  settings: {
    next: {
      // Where the Next.js app(s) live, for the Next.js ESLint rules
      // (monorepo layout: one app per directory under apps/).
      rootDir: ['apps/*/'],
    },
  },
};
14 changes: 9 additions & 5 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,31 +11,35 @@ jobs:
release:
name: Release
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout Repo
uses: actions/checkout@v2

- name: Setup pnpm
uses: pnpm/action-setup@v2.2.2
- name: Setup pnpm 7
uses: pnpm/action-setup@v2
with:
version: 7

- name: Setup Node.js 16.x
uses: actions/setup-node@v2
with:
node-version: 16.x
cache: 'pnpm'

- name: Install Dependencies
run: pnpm install
run: pnpm i

- name: Create Release Pull Request or Publish to npm
id: changesets
uses: changesets/action@v1
with:
# This expects you to have a script called release which does a build for your packages and calls changeset publish
publish: pnpm release
version: pnpm version-packages
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN_ELEVATED }}

- name: Send a Slack notification if a publish happens
if: steps.changesets.outputs.published == 'true'
# You can do something when a publish happens.
run: my-slack-bot send-notification --message "A new version of ${GITHUB_REPOSITORY} was published!"
21 changes: 13 additions & 8 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
# build
dist/

# dependencies
node_modules/

# logs
npm-debug.log
.DS_Store
node_modules
.turbo
*.log
.next
dist
dist-ssr
*.local
.env
.cache
server/dist
public/dist
.turbo
3 changes: 1 addition & 2 deletions .npmrc
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
save-exact = true
strict-peer-dependencies=false
auto-install-peers = true
3 changes: 0 additions & 3 deletions .prettierignore

This file was deleted.

51 changes: 24 additions & 27 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,21 +46,21 @@ The goal of this library lies in its commitment to work directly with each AI/Model provider.

```tsx
// app/api/generate/route.ts
import { Configuration, OpenAIApi } from 'openai-edge';
import { OpenAITextStream, StreamingTextResponse } from '@vercel/ai-utils';
import { Configuration, OpenAIApi } from "openai-edge";
import { OpenAITextStream, StreamingTextResponse } from "@vercel/ai-utils";

const config = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(config);

export const runtime = 'edge';
export const runtime = "edge";

export async function POST() {
const response = await openai.createChatCompletion({
model: 'gpt-4',
model: "gpt-4",
stream: true,
messages: [{ role: 'user', content: 'What is love?' }],
messages: [{ role: "user", content: "What is love?" }],
});
const stream = OpenAITextStream(response);
return new StreamingTextResponse(stream);
Expand Down Expand Up @@ -99,11 +99,8 @@ Create a Next.js Route Handler that uses the Edge Runtime that we'll use to generate the completion.

```tsx
// ./app/api/generate/route.ts
import { Configuration, OpenAIApi } from 'openai-edge';
import {
OpenAITextStream,
StreamingTextResponse
} from '@vercel/ai-utils';
import { Configuration, OpenAIApi } from "openai-edge";
import { OpenAITextStream, StreamingTextResponse } from "@vercel/ai-utils";

// Create an OpenAI API client (that's edge friendly!)
const config = new Configuration({
Expand All @@ -112,15 +109,15 @@ const config = new Configuration({
const openai = new OpenAIApi(config);

// IMPORTANT! Set the runtime to edge
export const runtime = 'edge';
export const runtime = "edge";

export async function POST(req: Request) {
// Extract the `prompt` from the body of the request
const { prompt } = await req.json();

// Ask OpenAI for a streaming chat completion given the prompt
const response = await openai.createCompletion({
model: 'gpt-3.5-turbo',
model: "gpt-3.5-turbo",
stream: true,
prompt,
});
Expand All @@ -139,21 +136,21 @@ Create a Client component with a form that we'll use to gather the prompt from t

```tsx
// ./app/form.ts
'use client';
"use client";

import { useState } from 'react';
import { useCompletion } from '@vercel/ai-utils/react'; //@todo
import { useState } from "react";
import { useCompletion } from "@vercel/ai-utils/react"; //@todo

export function Form() {
const [value, setValue] = useState('');
const { setPrompt, completion } = useCompletion('/api/generate');
const [value, setValue] = useState("");
const { setPrompt, completion } = useCompletion("/api/generate");
return (
<div>
<form
onSubmit={(e) => {
e.preventDefault();
setPrompt(value);
setValue('');
setValue("");
}}
>
<textarea value={value} onChange={(e) => setValue(e.target.value)} />
Expand Down Expand Up @@ -211,16 +208,16 @@ A transform that will extract the text from _most_ chat and completion HuggingFace models.

```tsx
// app/api/generate/route.ts
import { HfInference } from '@huggingface/inference';
import { HuggingFaceStream, StreamingTextResponse } from '@vercel/ai-utils';
import { HfInference } from "@huggingface/inference";
import { HuggingFaceStream, StreamingTextResponse } from "@vercel/ai-utils";

export const runtime = 'edge';
export const runtime = "edge";

const Hf = new HfInference(process.env.HUGGINGFACE_API_KEY);

export async function POST() {
const response = await Hf.textGenerationStream({
model: 'OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5',
model: "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
inputs: `<|prompter|>What's the Earth total population?<|endoftext|><|assistant|>`,
parameters: {
max_new_tokens: 200,
Expand All @@ -242,19 +239,19 @@ This is a tiny wrapper around `Response` class that makes returning `ReadableStream`s easier.

```tsx
// app/api/generate/route.ts
import { OpenAITextStream, StreamingTextResponse } from '@vercel/ai-utils';
import { OpenAITextStream, StreamingTextResponse } from "@vercel/ai-utils";

export const runtime = 'edge';
export const runtime = "edge";

export async function POST() {
const response = await openai.createChatCompletion({
model: 'gpt-4',
model: "gpt-4",
stream: true,
messages: { role: 'user', content: 'What is love?' },
messages: { role: "user", content: "What is love?" },
});
const stream = OpenAITextStream(response);
return new StreamingTextResponse(stream, {
'X-RATE-LIMIT': 'lol',
"X-RATE-LIMIT": "lol",
}); // => new Response(stream, { status: 200, headers: { 'Content-Type': 'text/plain; charset=utf-8', 'X-RATE-LIMIT': 'lol' }})
}
```
4 changes: 4 additions & 0 deletions apps/docs/.eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
// ESLint configuration for the docs app; inherits the shared preset
// from the `eslint-config-vercel-ai` package.
module.exports = {
  // Don't look further up the tree for additional ESLint configs.
  root: true,
  extends: ["vercel-ai"],
};
5 changes: 5 additions & 0 deletions apps/docs/next-env.d.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/basic-features/typescript for more information.
2 changes: 1 addition & 1 deletion apps/docs/next.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@ const withNextra = require('nextra')({
themeConfig: './theme.config.jsx'
})

module.exports = withNextra()
module.exports = withNextra()
Empty file.
11 changes: 11 additions & 0 deletions apps/docs/src/app/layout.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
/**
 * Root layout for the docs app (Next.js App Router convention:
 * apps/docs/src/app/layout.tsx). Wraps every page in the required
 * <html>/<body> shell.
 */
export default function RootLayout(props: { children: React.ReactNode }) {
  const { children } = props;
  return (
    <html lang="en">
      <body>{children}</body>
    </html>
  );
}
8 changes: 8 additions & 0 deletions apps/docs/src/app/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
// Docs app landing page (apps/docs/src/app/page.tsx).
// NOTE(review): `Button` is used below but never imported in this file —
// this will not compile unless `Button` is globally in scope. Confirm the
// intended import (presumably a shared UI package) and add it.
export default function Page() {
  return (
    <>
      <h1>Web</h1>
      <Button>Boop</Button>
    </>
  );
}
14 changes: 14 additions & 0 deletions apps/docs/tsconfig.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"exclude": ["node_modules"],
"extends": "@vercel/ai-tsconfig/nextjs.json",
"compilerOptions": {
"outDir": "dist",
"incremental": true,
"plugins": [
{
"name": "next"
}
]
},
"include": ["src", "next-env.d.ts", ".next/types/**/*.ts"]
}
Loading

0 comments on commit ccea4b8

Please sign in to comment.