diff --git a/.github/workflows/config.yaml b/.github/workflows/config.yaml
new file mode 100644
index 0000000..7cef17c
--- /dev/null
+++ b/.github/workflows/config.yaml
@@ -0,0 +1,31 @@
+name: Run Tests
+
+on: [push]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+          cache: 'pip'
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt -r requirements-dev.txt
+
+      - name: Run tests
+        run: |
+          echo ">> Running all test cases"
+          python3 -m pytest -s tests
+        # env:
+        #   Set your environment variables here; if they are sensitive, use secrets.
+        #   Example:
+        #   MY_ENV_VAR: ${{ secrets.MY_ENV_VAR }}
diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml
new file mode 100644
index 0000000..bae1a8a
--- /dev/null
+++ b/.github/workflows/deploy.yaml
@@ -0,0 +1,25 @@
+on:
+  push:
+    branches:
+      - master # or the name of your default branch
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Install Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: '16' # or your preferred Node.js version
+
+      - name: Install Vercel CLI
+        run: npm install -g vercel
+
+      - name: Deploy to Vercel
+        run: vercel . --token ${{ secrets.VERCEL_TOKEN }} --confirm # You might need the --confirm flag if you haven't set up the project yet
+        env:
+          VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
\ No newline at end of file
diff --git a/server/__pycache__/api.cpython-310.pyc b/server/__pycache__/api.cpython-310.pyc
index 54cc46b..9e11f6a 100644
Binary files a/server/__pycache__/api.cpython-310.pyc and b/server/__pycache__/api.cpython-310.pyc differ
diff --git a/server/api.py b/server/api.py
index 66316e7..3f50739 100644
--- a/server/api.py
+++ b/server/api.py
@@ -1,50 +1,48 @@
-from fastapi import FastAPI, Request
-from fastapi.templating import Jinja2Templates
-from typing import Any, Optional
-from pydantic import BaseModel, Field
+"App Entrypoint"
+import logging
 import openai
 import os
 
-app = FastAPI()
+from fastapi import FastAPI, Request, HTTPException
+from fastapi.templating import Jinja2Templates
+
+from server.models.request import Message
+from server.models.response import ResponseStatus, ResponseChat
+
+app = FastAPI(title="🤖 Prompt Engineers AI - Serverless Chat")
 os.environ.get('OPENAI_API_KEY')
 templates = Jinja2Templates(directory="static")
+logger = logging.getLogger("uvicorn.error")
 
 #######################################################################
 ### Pages
 #######################################################################
-@app.get("/", tags=["Pages"])
+@app.get("/", tags=["Pages"], include_in_schema=False)
 async def chat_interface(request: Request):
     """Serves the index page."""
     return templates.TemplateResponse(
         "pages/index.html",
         {"request": request, "current_page": "home"}
     )
+
+#######################################################################
+### Status Endpoints
+#######################################################################
+@app.get("/status", tags=["Status"], response_model=ResponseStatus)
+async def get_application_version():
+    """Check the application status."""
+    try:
+        return {
+            "version": os.getenv("APP_VERSION", ''),
+        }
+    except Exception as err:
+        logger.exception(err)
+        raise HTTPException(status_code=500, detail="Internal Server Error")
-#################################################
-## ChatGPT
-#################################################
-class Message(BaseModel): # pylint: disable=too-few-public-methods
-    """A message to send to the chatbot."""
-    model: Optional[str] = None
-    messages: Optional[Any] = None
-    temperature: Optional[float or int] = None
-
-    class Config: # pylint: disable=too-few-public-methods
-        """A message to send to the chatbot."""
-        json_schema_extra = {
-            "example": {
-                "model": "gpt-3.5-turbo",
-                "temperature": 0.8,
-                "messages": [
-                    {"role": "system", "content": "You are a helpful assistant."},
-                    {"role": "user", "content": 'Who won the 2001 world series?'},
-                    {"role": "assistant", "content": 'The arizona diamondbacks won the 2001 world series.'},
-                    {"role": "user", "content": 'Who were the pitchers?'},
-                ]
-            }
-        }
-
-@app.post("/chat", tags=["Chat"])
+#######################################################################
+### API Endpoints
+#######################################################################
+@app.post("/chat", tags=["Chat"], response_model=ResponseChat)
 async def chat_endpoint(body: Message):
     try:
         result = openai.ChatCompletion.create(
diff --git a/server/models/request.py b/server/models/request.py
new file mode 100644
index 0000000..71461a4
--- /dev/null
+++ b/server/models/request.py
@@ -0,0 +1,27 @@
+from typing import Any, Optional, Union
+from pydantic import BaseModel, ConfigDict
+
+#################################################
+## ChatGPT
+#################################################
+class Message(BaseModel): # pylint: disable=too-few-public-methods
+    """A message to send to the chatbot."""
+    model: Optional[str] = None
+    messages: Optional[Any] = None
+    temperature: Optional[Union[float, int]] = None
+
+    # Pydantic v2 ignores a nested class named ConfigDict, so expose the docs example via model_config.
+    model_config = ConfigDict(
+        json_schema_extra={
+            "example": {
+                "model": "gpt-3.5-turbo",
+                "temperature": 0.8,
+                "messages": [
+                    {"role": "system", "content": "You are a helpful assistant."},
+                    {"role": "user", "content": 'Who won the 2001 world series?'},
+                    {"role": "assistant", "content": 'The Arizona Diamondbacks won the 2001 World Series.'},
+                    {"role": "user", "content": 'Who were the pitchers?'},
+                ]
+            }
+        }
+    )
\ No newline at end of file
diff --git a/server/models/response.py b/server/models/response.py
new file mode 100644
index 0000000..8f5bc77
--- /dev/null
+++ b/server/models/response.py
@@ -0,0 +1,44 @@
+from typing import Any, Optional, List
+from pydantic import BaseModel, ConfigDict, Field
+
+
+class ResponseStatus(BaseModel):
+    version: str = Field(default='v0.0.15')
+
+#################################################
+## ChatGPT
+#################################################
+
+class ResponseChat(BaseModel):
+    # data: ChatCompletion
+    # `chat` carries the raw completion, matching the documented example below.
+    chat: Optional[Any] = None
+
+    # Pydantic v2 ignores a nested class named ConfigDict, so expose the docs example via model_config.
+    model_config = ConfigDict(
+        json_schema_extra={
+            "example": {
+                "chat": {
+                    "id": "chatcmpl-7myaL...",
+                    "object": "chat.completion",
+                    "created": 1691906989,
+                    "model": "gpt-3.5-turbo-0613",
+                    "choices": [
+                        {
+                            "index": 0,
+                            "message": {
+                                "role": "assistant",
+                                "content": "The Arizona Diamondbacks had Randy Johnson and Curt Schilling as their primary pitchers during the 2001 World Series. Both pitchers had exceptional performances throughout the series."
+                            },
+                            "finish_reason": "stop"
+                        }
+                    ],
+                    "usage": {
+                        "prompt_tokens": 52,
+                        "completion_tokens": 32,
+                        "total_tokens": 84
+                    }
+                }
+            }
+        }
+    )
diff --git a/tests/integration/test_main.py b/tests/integration/test_main.py
new file mode 100644
index 0000000..7b429cf
--- /dev/null
+++ b/tests/integration/test_main.py
@@ -0,0 +1,11 @@
+from starlette.testclient import TestClient
+
+from server.api import app
+
+client = TestClient(app)
+
+def test_ping():
+    response = client.get("/status")
+    # print(response.json())
+    assert response.status_code == 200
+    assert "version" in response.json()
\ No newline at end of file
diff --git a/tests/unit/test_example.py b/tests/unit/test_example.py
new file mode 100644
index 0000000..7f8295c
--- /dev/null
+++ b/tests/unit/test_example.py
@@ -0,0 +1,11 @@
+"""test example module."""
+import unittest
+
+
+class TestExampleCase(unittest.TestCase):
+
+    # @unittest.skip("Example Test Case")
+    def test_retrieve_files(self):
+        """Test that the files are retrieved."""
+        token = "test"
+        assert token == "test"
\ No newline at end of file
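A possible companion test for the new /chat route, following the same TestClient pattern as tests/integration/test_main.py. This is only a sketch: the test name, the stubbed completion payload, and the 200 assertion are assumptions (the handler body sits outside this hunk), and it presumes the handler treats the completion as a plain mapping, so no OPENAI_API_KEY is required.

from starlette.testclient import TestClient

import openai

from server.api import app

client = TestClient(app)


def test_chat(monkeypatch):
    """Exercise /chat with the OpenAI call stubbed out so the test runs offline."""
    fake_completion = {
        "id": "chatcmpl-test",
        "object": "chat.completion",
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": "Hello!"},
                "finish_reason": "stop",
            }
        ],
        "usage": {"prompt_tokens": 1, "completion_tokens": 1, "total_tokens": 2},
    }
    # Stub the same call the endpoint makes (openai.ChatCompletion.create).
    monkeypatch.setattr(openai.ChatCompletion, "create", lambda **kwargs: fake_completion)

    response = client.post("/chat", json={
        "model": "gpt-3.5-turbo",
        "temperature": 0.8,
        "messages": [{"role": "user", "content": "Say hello"}],
    })
    assert response.status_code == 200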