PyTest + SQLAlchemy is functional
Xithrius committed Oct 11, 2023
1 parent a65b9d8 commit 86e26ef
Showing 6 changed files with 209 additions and 28 deletions.
6 changes: 5 additions & 1 deletion api/app/database/models/command_metric.py
@@ -11,7 +11,11 @@
class CommandMetricModel(Base):
__tablename__ = "command_metrics"

id: Mapped[int] = mapped_column(primary_key=True, autoincrement=False)
id: Mapped[int] = mapped_column(
primary_key=True,
autoincrement=True,
nullable=False,
)

used_at: Mapped[datetime] = mapped_column(DateTime, default=now())

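Combined with the migration added later in this commit, the model after this change presumably reads roughly as follows; only the id column is visible in the diff above, so the Base import path and the exact column definitions for command_name and successfully_completed are assumptions reconstructed from the migration.

# Sketch of api/app/database/models/command_metric.py after this commit (reconstruction).
from datetime import datetime

from sqlalchemy import Boolean, DateTime, String
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.sql.functions import now

from app.database.base import Base  # assumed import path; not shown in the diff


class CommandMetricModel(Base):
    __tablename__ = "command_metrics"

    id: Mapped[int] = mapped_column(
        primary_key=True,
        autoincrement=True,
        nullable=False,
    )
    used_at: Mapped[datetime] = mapped_column(DateTime, default=now())
    command_name: Mapped[str] = mapped_column(String, nullable=False)
    successfully_completed: Mapped[bool] = mapped_column(Boolean, nullable=False)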
18 changes: 18 additions & 0 deletions api/app/routers/command_metric/router.py
@@ -1,6 +1,7 @@
from typing import Annotated

from fastapi import APIRouter, Depends, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.database.dependencies import get_db_session
@@ -11,6 +12,23 @@
router = APIRouter()


@router.get(
"/",
response_model=list[CommandMetric],
status_code=status.HTTP_200_OK,
)
async def get_all_builds(
session: Annotated[AsyncSession, Depends(get_db_session)],
limit: int | None = 10,
offset: int | None = 0,
) -> list[CommandMetricModel]:
stmt = select(CommandMetricModel).limit(limit).offset(offset)

items = await session.execute(stmt)

return list(items.scalars().fetchall())


@router.post(
"/",
response_model=CommandMetric,
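As a quick sanity check of the new paginated listing endpoint, something like the following httpx call should work once the API is running; the base URL and the router prefix are assumptions, since the prefix is mounted outside this diff.

import asyncio

import httpx


async def list_command_metrics() -> None:
    # "http://localhost:8000" and the "/api/command_metrics/" prefix are assumed for illustration.
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        response = await client.get(
            "/api/command_metrics/",
            params={"limit": 5, "offset": 0},
        )
        response.raise_for_status()
        for metric in response.json():
            print(metric["command_name"], metric["successfully_completed"])


asyncio.run(list_command_metrics())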
50 changes: 50 additions & 0 deletions api/app/tests/routers/test_command_metric.py
@@ -0,0 +1,50 @@
import pytest
from fastapi import FastAPI
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession
from starlette import status


@pytest.mark.anyio
async def test_creation(
fastapi_app: FastAPI,
client: AsyncClient,
dbsession: AsyncSession,
) -> None:
url = fastapi_app.url_path_for("create_command_usage_metric")
new_command = {
"command_name": "test_command",
"successfully_completed": True,
}
response = await client.post(url, json=new_command)
assert response.status_code == status.HTTP_201_CREATED

# response = await client.put(
# url,
# json={
# "name": test_name,
# },
# )
# assert response.status_code == status.HTTP_200_OK
# dao = DummyDAO(dbsession)
# instances = await dao.filter(name=test_name)
# assert instances[0].name == test_name


# @pytest.mark.anyio
# async def test_getting(
# fastapi_app: FastAPI,
# client: AsyncClient,
# dbsession: AsyncSession,
# ) -> None:
# """Tests dummy instance retrieval."""
# dao = DummyDAO(dbsession)
# test_name = uuid.uuid4().hex
# await dao.create_dummy_model(name=test_name)
# url = fastapi_app.url_path_for("get_dummy_models")
# response = await client.get(url)
# dummies = response.json()

# assert response.status_code == status.HTTP_200_OK
# assert len(dummies) == 1
# assert dummies[0]["name"] == test_name
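The test above leans on fastapi_app, client, and dbsession fixtures plus the anyio pytest plugin, none of which appear in this diff. A minimal conftest.py along these lines would make it runnable; the fixture bodies, the in-memory SQLite engine, and the get_app import are assumptions about how the project is wired, not code from the repository.

# conftest.py (sketch; the real fixtures live elsewhere in the project)
from collections.abc import AsyncIterator

import pytest
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine


@pytest.fixture
def anyio_backend() -> str:
    # Run @pytest.mark.anyio tests on asyncio only.
    return "asyncio"


@pytest.fixture
async def dbsession() -> AsyncIterator[AsyncSession]:
    # Assumed throwaway in-memory SQLite database; the project itself targets Postgres.
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    factory = async_sessionmaker(engine, expire_on_commit=False)
    async with factory() as session:
        yield session
    await engine.dispose()


@pytest.fixture
def fastapi_app() -> FastAPI:
    from app.application import get_app  # assumed application factory

    return get_app()


@pytest.fixture
async def client(fastapi_app: FastAPI) -> AsyncIterator[AsyncClient]:
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://test") as ac:
        yield ac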
5 changes: 2 additions & 3 deletions api/app/utils/graphing.py
@@ -5,13 +5,12 @@
import numpy as np
from sympy import Symbol, parse_expr

from app.utils.decorators import noblock

matplotlib.use("Agg")
plt.style.use("dark_background")


@noblock
# @noblock
def calculate(
expression: str,
restrict_x: tuple[int | float, int | float],
@@ -28,7 +27,7 @@ def calculate(
return x, y


@noblock
# @noblock
def graph2d(
x: np.ndarray,
y: np.ndarray,
71 changes: 47 additions & 24 deletions api/migrations/env.py
@@ -1,21 +1,38 @@
import asyncio
from logging.config import fileConfig

from alembic import context
from app.database import metadata
from sqlalchemy import engine_from_config, pool
from sqlalchemy.ext.asyncio.engine import create_async_engine
from sqlalchemy.future import Connection

from app.database.meta import meta
from app.database.models import load_all_models
from app.settings import settings

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

fileConfig(config.config_file_name)

target_metadata = metadata
load_all_models()
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = meta

config.set_main_option(
"sqlalchemy.url", "postgresql://xythrion:xythrion@localhost:5432/xythrion",
)
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
async def run_migrations_offline() -> None:
"""
Run migrations in 'offline' mode.
@@ -28,9 +45,8 @@ def run_migrations_offline() -> None:
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
url=str(settings.db_url),
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
@@ -40,28 +56,35 @@ def run_migrations_offline() -> None:
context.run_migrations()


def run_migrations_online() -> None:
def do_run_migrations(connection: Connection) -> None:
"""
Run actual sync migrations.
:param connection: connection to the database.
"""
context.configure(connection=connection, target_metadata=target_metadata)

with context.begin_transaction():
context.run_migrations()


async def run_migrations_online() -> None:
"""
Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)

with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
connectable = create_async_engine(str(settings.db_url))

with context.begin_transaction():
context.run_migrations()
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)


loop = asyncio.get_event_loop()
if context.is_offline_mode():
run_migrations_offline()
task = run_migrations_offline()
else:
run_migrations_online()
task = run_migrations_online()

loop.run_until_complete(task)
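Both online and offline paths now resolve the connection string through settings.db_url, which is not part of this commit. A plausible shape for that setting, assuming a pydantic-settings style Settings class and the asyncpg driver (the class layout and field names here are guesses; only the xythrion credentials and port are taken from the hardcoded URL above):

from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    db_host: str = "localhost"
    db_port: int = 5432
    db_user: str = "xythrion"
    db_pass: str = "xythrion"
    db_base: str = "xythrion"

    @property
    def db_url(self) -> str:
        # Async driver so create_async_engine(str(settings.db_url)) gets an asyncpg URL.
        return (
            f"postgresql+asyncpg://{self.db_user}:{self.db_pass}"
            f"@{self.db_host}:{self.db_port}/{self.db_base}"
        )


settings = Settings()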
87 changes: 87 additions & 0 deletions api/migrations/versions/b1a0d03ccfce_base_models.py
@@ -0,0 +1,87 @@
"""
Base models.
Revision ID: b1a0d03ccfce
Revises:
Create Date: 2023-10-11 01:01:51.976366
"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "b1a0d03ccfce"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"command_metrics",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("used_at", sa.DateTime(), nullable=False),
sa.Column("command_name", sa.String(), nullable=False),
sa.Column("successfully_completed", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"drg_builds",
sa.Column("id", sa.Integer(), autoincrement=False, nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("dwarf_class", sa.String(), nullable=False),
sa.Column("build", sa.String(), nullable=False),
sa.Column("overclock", sa.String(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"link_maps",
sa.Column("id", sa.Integer(), autoincrement=False, nullable=False),
sa.Column("server_id", sa.BigInteger(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("from_match", sa.String(), nullable=False),
sa.Column("to_match", sa.String(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"pins",
sa.Column("id", sa.Integer(), autoincrement=False, nullable=False),
sa.Column("server_id", sa.BigInteger(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("message", sa.String(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"trusted",
sa.Column("id", sa.Integer(), autoincrement=False, nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.Column("at", sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("user_id"),
)
op.create_table(
"web_maps",
sa.Column("id", sa.Integer(), autoincrement=False, nullable=False),
sa.Column("server_id", sa.BigInteger(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("matches", sa.String(), nullable=False),
sa.Column("xpath", sa.String(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("web_maps")
op.drop_table("trusted")
op.drop_table("pins")
op.drop_table("link_maps")
op.drop_table("drg_builds")
op.drop_table("command_metrics")
# ### end Alembic commands ###
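To apply or roll back this initial revision, the usual route is the Alembic CLI (alembic upgrade head / alembic downgrade base); the same thing can be driven from Python, assuming an alembic.ini sits at the project root next to the migrations directory:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # path to the project's Alembic config is an assumption

command.upgrade(cfg, "head")    # runs upgrade() above, creating all six tables
command.downgrade(cfg, "base")  # runs downgrade(), dropping them again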
