Add AI21 Labs Jurassic-2 to the Text Playground #3
DennisTraub authored Nov 8, 2023
2 parents 3bacf72 + e31273f commit 7467d92
Showing 21 changed files with 396 additions and 219 deletions.
File renamed without changes.
27 changes: 27 additions & 0 deletions backend/text_playground/jurassic2.py
@@ -0,0 +1,27 @@
import boto3
import json

bedrock_runtime = boto3.client(
    service_name="bedrock-runtime",
    region_name="us-east-1",
)

def invoke(prompt, temperature, max_tokens):
    prompt_config = {
        "prompt": prompt,
        "maxTokens": max_tokens,
        "temperature": temperature
    }

    response = bedrock_runtime.invoke_model(
        body=json.dumps(prompt_config),
        modelId="ai21.j2-mid-v1"
    )

    response_body = json.loads(response.get("body").read())

    completion = response_body["completions"][0]["data"]["text"]
    if completion.startswith("\n"):
        completion = completion[1:]

    return completion
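For illustration, a minimal usage sketch of the new Jurassic-2 helper. It assumes AWS credentials with Bedrock access are configured and that the backend package is importable as text_playground; the prompt text is made up and not part of this commit.

# Sketch only: import path and prompt are assumptions.
from text_playground import jurassic2

text = jurassic2.invoke(
    prompt="Summarize Amazon Bedrock in one sentence.",  # illustrative prompt
    temperature=0.5,   # matches the default used elsewhere in this commit
    max_tokens=200,    # matches the default used elsewhere in this commit
)
print(text)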
18 changes: 18 additions & 0 deletions backend/text_playground/models.py
@@ -5,5 +5,23 @@ class TextRequest(BaseModel):
    temperature: float
    maxTokens: int

class ClaudeRequest(TextRequest):
    prompt: str
    # Randomness and diversity
    # min: 0, max: 1, default: 0.5
    temperature: float = 0.5
    # Length
    # min: 0, max: 4096, default: 200
    maxTokens: int = 200

class Jurassic2Request(TextRequest):
    prompt: str
    # Randomness and diversity
    # min: 0, max: 1, default: 0.5
    temperature: float = 0.5
    # Length
    # min: 0, max: 8191, default: 200
    maxTokens: int = 200

class TextResponse(BaseModel):
    completion: str
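A small sketch of how the new request model resolves its defaults; the import path assumes the backend package is on the Python path as text_playground.

# Sketch only: import path is an assumption.
from text_playground.models import Jurassic2Request

req = Jurassic2Request(prompt="Hello, Jurassic-2")
print(req.temperature)  # 0.5 (default from the model definition)
print(req.maxTokens)    # 200 (default from the model definition)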
16 changes: 10 additions & 6 deletions backend/text_playground/routes.py
@@ -1,14 +1,18 @@
from fastapi import APIRouter
from . import models
-from . import services
+from . import claude
+from . import jurassic2


router = APIRouter()

-@router.post("/foundation-models/model/text/anthropic.claude-v2/invoke")
-def invoke(body: models.TextRequest):
-    completion = services.invoke(body.prompt, body.temperature, body.maxTokens)
+@router.post("/foundation-models/model/text/{modelId}/invoke")
+def invoke(body: models.TextRequest, modelId: str):
+    if modelId == "anthropic.claude-v2":
+        completion = claude.invoke(body.prompt, body.temperature, body.maxTokens)
+    elif modelId == "ai21.j2-mid-v1":
+        completion = jurassic2.invoke(body.prompt, body.temperature, body.maxTokens)

    return models.TextResponse(
        completion=completion
    )
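For illustration, a minimal sketch of calling the updated route once the FastAPI app is running; the base URL and port (http://localhost:8000) are assumptions and not part of this commit.

import requests

# Sketch only: assumes a local dev server; adjust host/port to your setup.
response = requests.post(
    "http://localhost:8000/foundation-models/model/text/ai21.j2-mid-v1/invoke",
    json={
        "prompt": "Write a haiku about large language models.",
        "temperature": 0.5,
        "maxTokens": 200,
    },
)
print(response.json()["completion"])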
2 changes: 1 addition & 1 deletion frontend/app/chat/page.js
@@ -1,6 +1,6 @@
"use client";

-import ChatContainer from "@/components/chat/ChatContainer";
+import ChatContainer from "@/components/chatPlayground/ChatComponent";

export default async function Chat() {
return (
2 changes: 1 addition & 1 deletion frontend/app/image/page.js
@@ -1,6 +1,6 @@
"use client";

-import ImageContainer from "@/components/Image/ImageContainer";
+import ImageContainer from "@/components/imagePlayground/ImageComponent";

export default async function Chat() {
return (
2 changes: 1 addition & 1 deletion frontend/app/models/[modelId]/page.js
@@ -1,7 +1,7 @@
"use client"

import React, {useEffect, useState} from "react";
-import ModelDetails from "@/components/models/ModelDetails";
+import ModelDetails from "@/components/foundationModels/ModelDetails";
import GlobalConfig from "@/app/app.config";

export default function Model({params: {modelId}}) {
2 changes: 1 addition & 1 deletion frontend/app/text/page.js
@@ -1,6 +1,6 @@
"use client";

-import TextContainer from "@/components/text/TextContainer";
+import TextContainer from "@/components/textPlayground/TextComponent";

export default async function Chat() {
return (
File renamed without changes.
@@ -1,9 +1,9 @@
"use client";

-import Human from "@/components/chat/Human";
+import Human from "@/components/chatPlayground/Human";
import React, { useState } from "react";
-import Assistant from "@/components/chat/Assistant";
-import Loader from "@/components/chat/Loader";
+import Assistant from "@/components/chatPlayground/Assistant";
+import Loader from "@/components/chatPlayground/Loader";
import GlobalConfig from "@/app/app.config";

export default function ChatContainer() {
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -1,7 +1,7 @@
import React, { useState } from 'react';
import Image from 'next/image';
import Spinner from "@/components/Spinner";
-import StyleSelector from "@/components/image/StyleSelector";
+import StyleSelector from "@/components/imagePlayground/StyleSelector";
import GlobalConfig from "@/app/app.config";

export default function ImageContainer() {
@@ -42,9 +42,9 @@ export default function StyleSelector({ onStyleChange }) {
<button id="dropdown-button"
        onClick={toggleDropdown}
        className="inline-flex justify-center w-full px-4 py-2 text-sm font-medium text-gray-700 bg-white border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-gray-100 focus:ring-blue-500">
-   <span className="mr-2">{selectedStyle}</span>
+   <span className="mr-auto">{selectedStyle}</span>
    <svg xmlns="http://www.w3.org/2000/svg"
-        className="w-5 h-5 ml-2 -mr-1"
+        className="w-5 h-5 ml-2 -mr-1 ml-auto"
         viewBox="0 0 20 20"
         fill="currentColor"
         aria-hidden="true">
50 changes: 50 additions & 0 deletions frontend/components/shared/TextModelSelector.jsx
@@ -0,0 +1,50 @@
"use client"

import React, {useState} from "react";
import { models } from './textModels';

export default function TextModelSelector({ model, onModelChange }) {
const [isOpen, setIsOpen] = useState(false);

const toggleDropdown = () => {
setIsOpen(!isOpen);
};

const selectModel = (item) => {
setIsOpen(false);
onModelChange(item);
};

return (
<div className="w-64 mb-4">
<div className="relative w-full">
<button id="dropdown-button"
onClick={toggleDropdown}
className="inline-flex justify-left w-full px-4 py-2 text-sm font-medium text-gray-700 bg-white border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-gray-100 focus:ring-blue-500">
<span className="mr-auto">Model: {model.modelName}</span>
<svg xmlns="http://www.w3.org/2000/svg"
className="w-5 h-5 ml-2 -mr-1 ml-auto"
viewBox="0 0 20 20"
fill="currentColor"
aria-hidden="true">
<path fillRule="evenodd"
d="M6.293 9.293a1 1 0 011.414 0L10 11.586l2.293-2.293a1 1 0 111.414 1.414l-3 3a1 1 0 01-1.414 0l-3-3a1 1 0 010-1.414z"
clipRule="evenodd" />
</svg>
</button>
{isOpen && (
<div className="absolute right-0 mt-2 rounded-md shadow-lg bg-white ring-1 ring-black ring-opacity-5 p-1 text-sm w-64">
{models.map((item, index) => (
<a key={index}
onClick={() => selectModel(item)}
href="#"
className="block px-4 py-2 text-gray-700 hover:bg-gray-100 active:bg-blue-100 cursor-pointer rounded-md">
{item.modelName}
</a>
))}
</div>
)}
</div>
</div>
);
};
38 changes: 38 additions & 0 deletions frontend/components/shared/textModels.js
@@ -0,0 +1,38 @@
export const defaultModel = {
    modelName: "Anthropic Claude V2",
    modelId: "anthropic.claude-v2",
    temperatureRange: {
        min: 0,
        max: 1,
        default: 0.5
    },
    maxTokenRange: {
        min: 0,
        max: 4096,
        default: 200
    }
}

export const models = [
    defaultModel,
    {
        modelName: "AI21 Labs Jurassic-2",
        modelId: "ai21.j2-mid-v1",
        temperatureRange: {
            min: 0,
            max: 1,
            default: 0.5
        },
        maxTokenRange: {
            min: 0,
            max: 8191,
            default: 200
        }
    }
]

export const defaultPayload = {
    prompt: "",
    temperature: defaultModel.temperatureRange.default,
    maxTokens: defaultModel.maxTokenRange.default
}