Add docker compose to support local development #10

Open · wants to merge 6 commits into `develop`
3 changes: 1 addition & 2 deletions .gitignore
@@ -315,5 +315,4 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

/cache-directory/
.env
32 changes: 32 additions & 0 deletions README.md
@@ -3,3 +3,35 @@
## License

[BSD 2-Clause License](https://opensource.org/license/bsd-2-clause)

## Local development with Docker Compose

### Setup

To set up a local development environment, first clone the repository.

Next, set the required environment variables in a `.env` file (n.b. the leading '.' is important); a template is provided as `env.template`.

```bash
cp env.template .env # to create a new .env file from the template
nano .env # to edit the file
```
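
The template defines a single variable, `PROJECT_NAME`, which Docker Compose uses to name the containers. A minimal `.env` might therefore look like this (the value is just an example):

```bash
PROJECT_NAME=wfh
```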

Then run the following command to bring up the services:

```bash
docker compose up -d # to start the services in the background
docker compose logs -f # to see the logs
```

### Accessing Fuseki

The Fuseki server is available at `http://localhost:3030/`.

The default username and password are both `admin` (the password is set via `ADMIN_PASSWORD` in `docker-compose.yml`).
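
As a quick smoke test, you can run a SPARQL query against the `wfh` dataset defined in `docker-compose.yml` (a sketch that assumes the default credentials and dataset name):

```bash
# List a few triples from the 'wfh' dataset
curl -u admin:admin \
  --data-urlencode 'query=SELECT * WHERE { ?s ?p ?o } LIMIT 10' \
  http://localhost:3030/wfh/query
```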

### Accessing FastAPI

An API for depositing RO-Crates is available at `http://localhost:8000/`.

Interactive API documentation is available at `http://localhost:8000/docs`.
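
For example, an RO-Crate metadata file can be deposited with a single request (the file name below is just a placeholder):

```bash
# POST a crate to the upload endpoint; the form field must be named "file"
curl -X POST -F "file=@ro-crate-metadata.json" http://localhost:8000/upload/crate
```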
33 changes: 33 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,33 @@
services:
  fuseki:
    image: stain/jena-fuseki
    container_name: ${PROJECT_NAME}-fuseki
    ports:
      - "3030:3030"
    environment:
      ADMIN_PASSWORD: admin
      FUSEKI_DATASET_1: wfh
    volumes:
      - jena-data:/fuseki
    networks:
      - wfh

  fastapi:
    image: wfh
    container_name: ${PROJECT_NAME}-fastapi
    build:
      context: upload-crate
      dockerfile: Dockerfile
    ports:
      - "8000:80"
    environment:
      - FUSEKI_URL=http://${PROJECT_NAME}-fuseki:3030
    networks:
      - wfh

volumes:
  jena-data:

networks:
  wfh:

1 change: 1 addition & 0 deletions env.template
@@ -0,0 +1 @@
PROJECT_NAME=nameofproduct
11 changes: 11 additions & 0 deletions upload-crate/Dockerfile
@@ -0,0 +1,11 @@
FROM python:3.10

# Install fastapi omnicli wrapper

WORKDIR /code
COPY ./requirements.txt /code/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
COPY ./app /code/app

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "80", "--reload"]
Empty file added upload-crate/app/__init__.py
Empty file.
131 changes: 131 additions & 0 deletions upload-crate/app/main.py
@@ -0,0 +1,131 @@
import pexpect
import json
import ontospy
import requests
import os
import uuid

from contextlib import asynccontextmanager
from llama_index.llms.ollama import Ollama
from llama_index.core.llms import ChatMessage

from fastapi import FastAPI, HTTPException, File, UploadFile, Request

# -----------------------------------------------------------------------------
# Constants

FUSEKI_URL = os.getenv("FUSEKI_URL")

# -----------------------------------------------------------------------------
# Lifespan with Ollama model

ollama = {}

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Create an Ollama instance
    ollama['model'] = Ollama(
        model="llama3",
        base_url="http://randall-ollama:11434",
        request_timeout=30.0,
        #json_mode=True
    )
    yield

    # Drop the model reference on shutdown
    ollama.clear()

# -----------------------------------------------------------------------------
# Session

app = FastAPI(lifespan=lifespan)

# -----------------------------------------------------------------------------
# Routes

# Route which accepts a json payload and uploads it to the triplestore
@app.post("/upload/message")
async def upload_message(request: Request):
    data = await request.json()

    # Extract the triples from the data
    triples_n3 = []
    for metric in data['metrics']:
        # Randomly generate a unique identifier for the message
        message_id = f"{uuid.uuid4()}"

        # Deconstruct the topic into a series of linked triples
        topic = metric['tags']['topic']
        topic_parts = topic.split('/')

        # Head triples (a message was recorded at a timestamp)
        triples_n3.append(f'<{topic}> <message> <{metric["timestamp"]}> .')
        triples_n3.append(f'<{metric["timestamp"]}> <message> <{message_id}> .')

        # Value triple
        triples_n3.append(f'<{message_id}> <value> <{metric["fields"]["value"]}> .')

    # Upload the triples to the triplestore
    # (the dataset name matches FUSEKI_DATASET_1 in docker-compose.yml)
    for triple in triples_n3:
        try:
            query = f"update=insert data {{ {triple} }}"
            req = requests.post(f"{FUSEKI_URL}/wfh/update",
                                data=str(query),
                                auth=('admin', 'admin',),
                                headers={'Content-Type': 'application/x-www-form-urlencoded'}
                                )
        except requests.exceptions.RequestException as e:
            print(f"Triple: {triple} caused an error!")
            print(e)
            return {"error": e}

    return

@app.post("/upload/crate")
def upload_crate(file: UploadFile):
    fileString = file.file.read()
    model = ontospy.Ontospy(data=fileString,
                            rdf_format="json-ld",
                            verbose=False,
                            hide_implicit_types=False,
                            hide_base_schemas=False,
                            hide_implicit_preds=False,
                            hide_individuals=False
                            )

    # Extract the triples from the model
    triples = model.query("SELECT ?s ?p ?o WHERE { ?s ?p ?o }")
    print(triples)

    # Upload the triples to the triplestore
    for triple in triples:
        # Convert to N3 format (the object is serialised as a plain literal)
        triple_n3 = f'<{triple[0]}> <{triple[1]}> "{triple[2]}" .'

        # Add to the query
        query = f"update=insert data {{ {triple_n3} }}"

        try:
            req = requests.post(f"{FUSEKI_URL}/wfh/update",
                                data=str(query),
                                auth=('admin', 'admin',),
                                headers={'Content-Type': 'application/x-www-form-urlencoded'}
                                )
        except requests.exceptions.RequestException as e:
            print(f"Triple: {triple} caused an error!")
            print(e)
            return {"error": e}
        print(req.text)

    return {"filename": file.filename}


# -----------------------------------------------------------------------------
# Chat endpoints

@app.post("/chat")
def chat(request: Request, query: str):
    # Get the answer to the query
    response = ollama['model'].complete(query)

    return {"response": response}

24 changes: 24 additions & 0 deletions upload-crate/app/test_api.py
@@ -0,0 +1,24 @@
from fastapi.testclient import TestClient

from .main import app

client = TestClient(app)

################################################################################
# Test correctly formed GET calls for endpoints defined in main.py

def test_help():
response = client.get("/omnicli/help/")
assert response.status_code == 200
assert response.json() == {"output":" help : Print this help message\n quit : Quit the interactive terminal\n log <level> : Change the log level\n list <url> : List the contents of a folder\n stat <url> : Print information about a specified file or folder\n cd <url> : Makes paths relative to the specified folder\n push <url> : Makes paths relative to the specified folder\n You can restore the original folder with pop\n pop : Restores a folder pushed with 'push'\n copy <src> <dst> : Copies a file or folder from src to dst (overwrites dst)\n move <src> <dst> : Moves a file or folder from src to dst (overwrites dst)\n del <url> : Deletes the specified file or folder\n mkdir <url> : Create a folder\n cat <url> : Print the contents of a file\n cver : Print the client version\n rver : Print the USD Resolver Plugin version\n sver <url> : Print the server version\n load <url> : Load a USD file\n save [url] : Save a previously loaded USD file (optionally to a different URL)\n close : Close a previously loaded USD file\n lock [url] : Lock a USD file (defaults to loaded stage root)\n unlock [url] : Unlock a USD file (defaults to loaded stage root)\n getacls <url> : Print the ACLs for a URL\n setacls <url> <user|group> <r|w|a|-> : Change the ACLs for a user or group for a URL\n Specify '-' to remove that user|group from the ACLs\n auth [username] [password] : Set username/password for authentication\n Password defaults to username; blank reverts to standard auth\n checkpoint <url> [comment] : Create a checkpoint of a URL\n listCheckpoints <url> : List all checkpoints of a URL\n restoreCheckpoint <url> : Restore a checkpoint\n disconnect <url> : Disconnect from a server\n join <url> : Join a channel. Only one channel can be joined at a time.\n send <message> : Send a message to the joined channel.\n leave : Leave the joined channel\n"}


def test_log():
response = client.get("/omnicli/log/")
assert response.status_code == 422
assert response.json() == {"detail":[{"loc":["query","level"],"msg":"field required","type":"value_error.missing"}]}

response = client.get("/omnicli/log/?level=info")
assert response.status_code == 200
assert response.json() == {"output":"Log level set to info"}

8 changes: 8 additions & 0 deletions upload-crate/requirements.txt
@@ -0,0 +1,8 @@
fastapi
pydantic
uvicorn[standard]
pytest
requests
pexpect
python-multipart
ontospy
llama-index-llms-ollama  # provides the llama_index.llms.ollama import used in app/main.py