Commit 15ba68a
Allie Crevier committed Nov 3, 2020
1 parent 6861437
Showing 4 changed files with 471 additions and 1 deletion.
@@ -0,0 +1,56 @@
"""add seen tables | ||
Revision ID: bd57477f19a2 | ||
Revises: a4bf1f58ce69 | ||
Create Date: 2020-10-20 22:43:46.743035 | ||
""" | ||
import sqlalchemy as sa | ||
|
||
from alembic import op | ||
|
||
# revision identifiers, used by Alembic. | ||
revision = 'bd57477f19a2' | ||
down_revision = 'a4bf1f58ce69' | ||
branch_labels = None | ||
depends_on = None | ||
|
||
|
||
def upgrade(): | ||
# ### commands auto generated by Alembic - please adjust! ### | ||
op.create_table('seen_files', | ||
sa.Column('id', sa.Integer(), nullable=False), | ||
sa.Column('file_id', sa.Integer(), nullable=False), | ||
sa.Column('journalist_id', sa.Integer(), nullable=True), | ||
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name=op.f('fk_seen_files_file_id_files')), | ||
sa.ForeignKeyConstraint(['journalist_id'], ['users.id'], name=op.f('fk_seen_files_journalist_id_users')), | ||
sa.PrimaryKeyConstraint('id', name=op.f('pk_seen_files')), | ||
sa.UniqueConstraint('file_id', 'journalist_id', name=op.f('uq_seen_files_file_id')) | ||
) | ||
op.create_table('seen_messages', | ||
sa.Column('id', sa.Integer(), nullable=False), | ||
sa.Column('message_id', sa.Integer(), nullable=False), | ||
sa.Column('journalist_id', sa.Integer(), nullable=True), | ||
sa.ForeignKeyConstraint(['journalist_id'], ['users.id'], name=op.f('fk_seen_messages_journalist_id_users')), | ||
sa.ForeignKeyConstraint(['message_id'], ['messages.id'], name=op.f('fk_seen_messages_message_id_messages')), | ||
sa.PrimaryKeyConstraint('id', name=op.f('pk_seen_messages')), | ||
sa.UniqueConstraint('message_id', 'journalist_id', name=op.f('uq_seen_messages_message_id')) | ||
) | ||
op.create_table('seen_replies', | ||
sa.Column('id', sa.Integer(), nullable=False), | ||
sa.Column('reply_id', sa.Integer(), nullable=False), | ||
sa.Column('journalist_id', sa.Integer(), nullable=True), | ||
sa.ForeignKeyConstraint(['journalist_id'], ['users.id'], name=op.f('fk_seen_replies_journalist_id_users')), | ||
sa.ForeignKeyConstraint(['reply_id'], ['replies.id'], name=op.f('fk_seen_replies_reply_id_replies')), | ||
sa.PrimaryKeyConstraint('id', name=op.f('pk_seen_replies')), | ||
sa.UniqueConstraint('reply_id', 'journalist_id', name=op.f('uq_seen_replies_reply_id')) | ||
) | ||
# ### end Alembic commands ### | ||
|
||
|
||
def downgrade(): | ||
# ### commands auto generated by Alembic - please adjust! ### | ||
op.drop_table('seen_replies') | ||
op.drop_table('seen_messages') | ||
op.drop_table('seen_files') | ||
# ### end Alembic commands ### |
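Of the four changed files in this commit, only the migration above and its test below are rendered here; the accompanying changes to the client's ORM layer and test utilities are not shown. As a rough sketch, declarative models matching the three new tables could look like the following. The class names and the standalone Base setup are illustrative assumptions, not the commit's actual code.

# Hypothetical sketch of ORM models for the seen tables (illustrative only).
from sqlalchemy import Column, ForeignKey, Integer, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()  # assumption: the client would reuse its own shared declarative Base


class SeenFile(Base):
    __tablename__ = "seen_files"
    __table_args__ = (UniqueConstraint("file_id", "journalist_id"),)

    id = Column(Integer, primary_key=True)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    journalist_id = Column(Integer, ForeignKey("users.id"), nullable=True)


class SeenMessage(Base):
    __tablename__ = "seen_messages"
    __table_args__ = (UniqueConstraint("message_id", "journalist_id"),)

    id = Column(Integer, primary_key=True)
    message_id = Column(Integer, ForeignKey("messages.id"), nullable=False)
    journalist_id = Column(Integer, ForeignKey("users.id"), nullable=True)


class SeenReply(Base):
    __tablename__ = "seen_replies"
    __table_args__ = (UniqueConstraint("reply_id", "journalist_id"),)

    id = Column(Integer, primary_key=True)
    reply_id = Column(Integer, ForeignKey("replies.id"), nullable=False)
    journalist_id = Column(Integer, ForeignKey("users.id"), nullable=True)

The unique constraints mirror the uq_seen_* constraints in the migration, so a given journalist can mark a given file, message, or reply as seen only once.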
@@ -0,0 +1,232 @@
# -*- coding: utf-8 -*-

import os
import random
import subprocess

import pytest
from sqlalchemy import text
from sqlalchemy.exc import IntegrityError

from securedrop_client import db
from securedrop_client.db import Reply, User

from .utils import (
    add_file,
    add_message,
    add_reply,
    add_source,
    add_user,
    mark_file_as_seen,
    mark_message_as_seen,
    mark_reply_as_seen,
)


class UpgradeTester:
    """
    Verify that upgrading to the target migration results in the creation of the seen tables.
    """

    NUM_USERS = 20
    NUM_SOURCES = 20
    NUM_REPLIES = 40

    def __init__(self, homedir):
        subprocess.check_call(["sqlite3", os.path.join(homedir, "svs.sqlite"), ".databases"])
        self.session = db.make_session_maker(homedir)()

    def load_data(self):
        for source_id in range(1, self.NUM_SOURCES + 1):
            add_source(self.session)

            # Add zero to a few messages from each source; some messages are set to downloaded
            for _ in range(random.randint(0, 2)):
                add_message(self.session, source_id)

            # Add zero to a few files from each source; some files are set to downloaded
            for _ in range(random.randint(0, 2)):
                add_file(self.session, source_id)

        self.session.commit()

        for i in range(self.NUM_USERS):
            if i == 0:
                # As of this migration, the server tells the client that the associated journalist
                # of a reply has been deleted by returning "deleted" as the uuid of the associated
                # journalist. This gets stored as the journalist_id in the replies table.
                #
                # Make sure to test this case as well.
                add_user(self.session, "deleted")
                source_id = random.randint(1, self.NUM_SOURCES)
                add_reply(self.session, "deleted", source_id)
            else:
                add_user(self.session)

        self.session.commit()

        # Add replies from randomly-selected journalists to randomly-selected sources
        for _ in range(1, self.NUM_REPLIES):
            journalist_id = random.randint(1, self.NUM_USERS)
            source_id = random.randint(1, self.NUM_SOURCES)
            add_reply(self.session, journalist_id, source_id)

        self.session.commit()

    def check_upgrade(self):
        """
        Make sure seen tables exist and work as expected.
        """
        replies = self.session.query(Reply).all()
        assert len(replies)

        for reply in replies:
            # Will fail if the User does not exist
            self.session.query(User).filter_by(id=reply.journalist_id).one()

        sql = "SELECT * FROM files"
        files = self.session.execute(text(sql)).fetchall()

        sql = "SELECT * FROM messages"
        messages = self.session.execute(text(sql)).fetchall()

        sql = "SELECT * FROM replies"
        replies = self.session.execute(text(sql)).fetchall()

        # Now that the seen tables exist, it should be possible to mark some files, messages,
        # and replies as seen
        for file in files:
            if random.choice([0, 1]):
                selected_journo_id = random.randint(1, self.NUM_USERS)
                mark_file_as_seen(self.session, file.id, selected_journo_id)
        for message in messages:
            if random.choice([0, 1]):
                selected_journo_id = random.randint(1, self.NUM_USERS)
                mark_message_as_seen(self.session, message.id, selected_journo_id)
        for reply in replies:
            if random.choice([0, 1]):
                selected_journo_id = random.randint(1, self.NUM_USERS)
                mark_reply_as_seen(self.session, reply.id, selected_journo_id)

        # Check unique constraint on (reply_id, journalist_id)
        params = {"reply_id": 100, "journalist_id": 100}
        sql = """
        INSERT INTO seen_replies (reply_id, journalist_id)
        VALUES (:reply_id, :journalist_id);
        """
        self.session.execute(text(sql), params)
        with pytest.raises(IntegrityError):
            self.session.execute(text(sql), params)

        # Check unique constraint on (message_id, journalist_id)
        params = {"message_id": 100, "journalist_id": 100}
        sql = """
        INSERT INTO seen_messages (message_id, journalist_id)
        VALUES (:message_id, :journalist_id);
        """
        self.session.execute(text(sql), params)
        with pytest.raises(IntegrityError):
            self.session.execute(text(sql), params)

        # Check unique constraint on (file_id, journalist_id)
        params = {"file_id": 101, "journalist_id": 100}
        sql = """
        INSERT INTO seen_files (file_id, journalist_id)
        VALUES (:file_id, :journalist_id);
        """
        self.session.execute(text(sql), params)
        with pytest.raises(IntegrityError):
            self.session.execute(text(sql), params)


class DowngradeTester:
    """
    Verify that downgrading from the target migration removes the seen tables. There is no data
    that needs to be restored on downgrade (the migration is backwards compatible).
    """

    NUM_USERS = 20
    NUM_SOURCES = 20
    NUM_REPLIES = 40

    def __init__(self, homedir):
        subprocess.check_call(["sqlite3", os.path.join(homedir, "svs.sqlite"), ".databases"])
        self.session = db.make_session_maker(homedir)()

    def load_data(self):
        for source_id in range(1, self.NUM_SOURCES + 1):
            add_source(self.session)

            # Add zero to a few messages from each source; some messages are set to downloaded
            for _ in range(random.randint(0, 3)):
                add_message(self.session, source_id)

            # Add zero to a few files from each source; some files are set to downloaded
            for _ in range(random.randint(0, 3)):
                add_file(self.session, source_id)

        self.session.commit()

        for i in range(self.NUM_USERS):
            if i == 0:
                # As of this migration, the server tells the client that the associated journalist
                # of a reply has been deleted by returning "deleted" as the uuid of the associated
                # journalist. This gets stored as the journalist_id in the replies table.
                #
                # Make sure to test this case as well.
                add_user(self.session, "deleted")
                source_id = random.randint(1, self.NUM_SOURCES)
                add_reply(self.session, "deleted", source_id)
            else:
                add_user(self.session)

        self.session.commit()

        # Add replies from randomly-selected journalists to randomly-selected sources
        for _ in range(1, self.NUM_REPLIES):
            journalist_id = random.randint(1, self.NUM_USERS)
            source_id = random.randint(1, self.NUM_SOURCES)
            add_reply(self.session, journalist_id, source_id)

        self.session.commit()

        # Mark some files, messages, and replies as seen
        sql = "SELECT * FROM files"
        files = self.session.execute(text(sql)).fetchall()
        for file in files:
            if random.choice([0, 1]):
                selected_journo_id = random.randint(1, self.NUM_USERS)
                mark_file_as_seen(self.session, file.id, selected_journo_id)

        sql = "SELECT * FROM messages"
        messages = self.session.execute(text(sql)).fetchall()
        for message in messages:
            if random.choice([0, 1]):
                selected_journo_id = random.randint(1, self.NUM_USERS)
                mark_message_as_seen(self.session, message.id, selected_journo_id)

        sql = "SELECT * FROM replies"
        replies = self.session.execute(text(sql)).fetchall()
        for reply in replies:
            if random.choice([0, 1]):
                selected_journo_id = random.randint(1, self.NUM_USERS)
                mark_reply_as_seen(self.session, reply.id, selected_journo_id)

        self.session.commit()

    def check_downgrade(self):
        """
        Check that the seen tables no longer exist.
        """
        params = {"table_name": "seen_files"}
        sql = "SELECT name FROM sqlite_master WHERE type='table' AND name=:table_name;"
        seen_files_exists = self.session.execute(text(sql), params).fetchall()
        assert not seen_files_exists

        params = {"table_name": "seen_messages"}
        sql = "SELECT name FROM sqlite_master WHERE type='table' AND name=:table_name;"
        seen_messages_exists = self.session.execute(text(sql), params).fetchall()
        assert not seen_messages_exists

        params = {"table_name": "seen_replies"}
        sql = "SELECT name FROM sqlite_master WHERE type='table' AND name=:table_name;"
        seen_replies_exists = self.session.execute(text(sql), params).fetchall()
        assert not seen_replies_exists
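The tests above import their fixtures and seen-marking helpers from .utils, another of the changed files not rendered in this view. As a minimal sketch, one such helper could simply insert a row into the new table with a parameterized statement; the signature below matches the calls in the tests, but the body and the commit-on-write behavior are assumptions, not the commit's actual implementation.

# Hypothetical sketch of a .utils seen-marking helper (illustrative only).
from sqlalchemy import text
from sqlalchemy.orm import Session


def mark_file_as_seen(session: Session, file_id: int, journalist_id: int) -> None:
    """Record that the given journalist has seen the given file."""
    params = {"file_id": file_id, "journalist_id": journalist_id}
    sql = """
    INSERT INTO seen_files (file_id, journalist_id)
    VALUES (:file_id, :journalist_id)
    """
    session.execute(text(sql), params)
    session.commit()  # assumption: helpers persist immediately so later queries see the row

Because of the unique constraint created by the migration, calling such a helper twice with the same (file_id, journalist_id) pair would raise an IntegrityError, which is exactly what check_upgrade exercises with its raw INSERT statements.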