Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -139,3 +139,6 @@ converge_db/
# google calendar data
events.json
calendar_list.json

# maintain consistent coding styles
.editorconfig
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
"""include cascading deletion on room tags

Revision ID: c058d462a21d
Revises: 98e4dbfc868a
Create Date: 2019-11-04 17:45:56.163962

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'c058d462a21d'
down_revision = '98e4dbfc868a'
branch_labels = None
depends_on = None


def upgrade():
    """Recreate the room_tags foreign keys with ON DELETE CASCADE.

    Drops the existing tag/room foreign keys on ``room_tags`` and
    re-adds them with ``ondelete='CASCADE'`` so deleting a row in
    ``tags`` or ``rooms`` also removes the matching ``room_tags`` rows
    (fixes the IntegrityError raised when archived rooms still
    referenced by room_tags are purged).
    """
    # Name the new constraints explicitly (instead of passing None) so
    # downgrade() can drop them by the same name on any backend, rather
    # than relying on the dialect's auto-naming convention.
    op.drop_constraint('room_tags_tag_id_fkey',
                       'room_tags', type_='foreignkey')
    op.create_foreign_key('room_tags_tag_id_fkey', 'room_tags', 'tags',
                          ['tag_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint('room_tags_room_id_fkey',
                       'room_tags', type_='foreignkey')
    op.create_foreign_key('room_tags_room_id_fkey', 'room_tags', 'rooms',
                          ['room_id'], ['id'], ondelete='CASCADE')


def downgrade():
    """Restore the original room_tags foreign keys (no cascading deletes)."""
    # (constraint name, referenced table, local column), in the same
    # order the original migration processed them: room first, then tag.
    foreign_keys = (
        ('room_tags_room_id_fkey', 'rooms', 'room_id'),
        ('room_tags_tag_id_fkey', 'tags', 'tag_id'),
    )
    for fkey_name, referent_table, local_column in foreign_keys:
        op.drop_constraint(fkey_name, 'room_tags', type_='foreignkey')
        op.create_foreign_key(fkey_name, 'room_tags',
                              referent_table, [local_column], ['id'])
26 changes: 13 additions & 13 deletions api/events/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,14 @@
from helpers.database import Base
from utilities.utility import Utility, StateType
from helpers.events_filter.events_filter import (
validate_date_input,
format_range_dates,
validate_date_input,
format_range_dates,
)


class Events(Base, Utility):
__tablename__ = 'events'
id = Column(Integer, Sequence('events_id_seq', start=1, increment=1), primary_key=True) # noqa
id = Column(Integer, Sequence('events_id_seq', start=1, increment=1), primary_key=True) # noqa
event_id = Column(String, nullable=False)
room_id = Column(Integer, ForeignKey('rooms.id', ondelete="CASCADE"))
event_title = Column(String, nullable=True)
Expand All @@ -38,19 +38,19 @@ def filter_events_by_date_range(query, start_date, end_date):
validate_date_input(start_date, end_date)
if not start_date and not end_date:
events = query.filter(
Events.state == 'active'
).all()
Events.state == 'active'
).all()
if not events:
raise GraphQLError('Events do not exist')
return events

start_date, end_date = format_range_dates(start_date, end_date)

events = query.filter(
Events.state == 'active',
Events.start_time >= start_date,
Events.end_time <= end_date
).all()
Events.state == 'active',
Events.start_time >= start_date,
Events.end_time <= end_date
).all()
if not events:
raise GraphQLError('Events do not exist for the date range')
return events
Expand All @@ -64,9 +64,9 @@ def filter_event_by_room(room_id, start_date, end_date):
validate_date_input(start_date, end_date)
if not start_date and not end_date:
events = Events.query.filter_by(
room_id = room_id,
state = 'active'
).all()
room_id=room_id,
state='active'
).all()
if not events:
raise GraphQLError('Events do not exist')
return events
Expand All @@ -78,7 +78,7 @@ def filter_event_by_room(room_id, start_date, end_date):
Events.state == 'active',
Events.start_time >= start_date,
Events.end_time <= end_date
).all()
).all()
if not events:
raise GraphQLError('Events do not exist for the date range')
return events
39 changes: 20 additions & 19 deletions api/events/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@ class Arguments:
def mutate(self, info, **kwargs):
room_id, event = check_event_in_db(self, info, "checked_in", **kwargs)
if kwargs.get('check_in_time'):
update_device_last_activity(info, room_id, kwargs['check_in_time'], 'check in')
update_device_last_activity(
info, room_id, kwargs['check_in_time'], 'check in')
if not event:
event = EventsModel(
event_id=kwargs['event_id'],
Expand Down Expand Up @@ -85,10 +86,11 @@ def mutate(self, info, **kwargs):
room_id, event = check_event_in_db(self, info, "cancelled", **kwargs)
try:
device_last_seen = parser.parse(
kwargs['start_time']) + timedelta(minutes=10)
kwargs['start_time']) + timedelta(minutes=10)
except ValueError:
raise GraphQLError("Invalid start time")
update_device_last_activity(info, room_id, device_last_seen, 'cancel meeting')
update_device_last_activity(
info, room_id, device_last_seen, 'cancel meeting')
if not event:
event = EventsModel(
event_id=kwargs['event_id'],
Expand All @@ -102,15 +104,15 @@ def mutate(self, info, **kwargs):
auto_cancelled=True)
event.save()
calendar_event = get_single_calendar_event(
kwargs['calendar_id'],
kwargs['event_id']
)
kwargs['calendar_id'],
kwargs['event_id']
)
event_reject_reason = 'after 10 minutes'
if not notification.event_cancellation_notification(
calendar_event,
room_id,
event_reject_reason
):
calendar_event,
room_id,
event_reject_reason
):
raise GraphQLError("Event cancelled but email not sent")
return CancelEvent(event=event)

Expand All @@ -131,12 +133,13 @@ class Arguments:
def mutate(self, info, **kwargs):
room_id, event = check_event_in_db(self, info, "ended", **kwargs)
if kwargs.get('meeting_end_time'):
update_device_last_activity(info, room_id, kwargs['meeting_end_time'], 'end meeting')
update_device_last_activity(
info, room_id, kwargs['meeting_end_time'], 'end meeting')
if not event:
event = EventsModel(
event_id=kwargs['event_id'],
meeting_end_time=kwargs['meeting_end_time']
)
)
event.save()

return EndEvent(event=event)
Expand Down Expand Up @@ -264,7 +267,7 @@ class Query(graphene.ObjectType):
\n- end_date: The date and time to end selection in range \
when filtering by the time period\
\n- page: Page number to select when paginating\
\n- per_page: The maximum number of events per page when paginating") # noqa
\n- per_page: The maximum number of events per page when paginating") # noqa

all_events_by_room = graphene.Field(
RoomEvents,
Expand All @@ -278,7 +281,6 @@ class Query(graphene.ObjectType):
\n- end_date: The date and time to end selection in range \
when filtering by the time period")


@Auth.user_roles('Admin', 'Default User', 'Super Admin')
def resolve_all_events(self, info, **kwargs):
start_date = kwargs.get('start_date')
Expand All @@ -289,7 +291,7 @@ def resolve_all_events(self, info, **kwargs):
query = Events.get_query(info)
response = filter_events_by_date_range(
query, start_date, end_date
)
)
sort_events_by_date(response)

if page and per_page:
Expand All @@ -310,7 +312,6 @@ def resolve_all_events(self, info, **kwargs):
pages=pages)

return PaginateEvents(events=response)


@Auth.user_roles('Admin', 'Super Admin')
def resolve_all_events_by_room(self, info, **kwargs):
Expand All @@ -319,13 +320,13 @@ def resolve_all_events_by_room(self, info, **kwargs):
start_date = kwargs.get('start_date')
end_date = kwargs.get('end_date')
room = RoomModel.query.filter_by(
calendar_id = calendar_id
).first()
calendar_id=calendar_id
).first()
if not room:
raise GraphQLError("No rooms with the given CalendarId")
response = filter_event_by_room(
room.id, start_date, end_date
)
)
sort_events_by_date(response)

return RoomEvents(events=response)
8 changes: 8 additions & 0 deletions config.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os
from celery.schedules import crontab
basedir = os.path.abspath(os.path.dirname(__file__))


Expand All @@ -18,6 +19,13 @@ class Config:
# Celery configuration
CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND')
CELERY_IMPORTS = ["services.data_deletion.clean_archived_data"]
CELERYBEAT_SCHEDULE = {
'clean_archived_data': {
'task': 'clean_archived_data.delete_archived_data',
'schedule': crontab(hour=23, minute=00)
},
}

@staticmethod
def init_app(app):
Expand Down
2 changes: 1 addition & 1 deletion docker/dev/start_redis.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@
#done
cd /app
export $(cat .env | xargs)
celery worker -A cworker.celery --loglevel=info
celery worker -A cworker.celery --loglevel=info & celery beat -A cworker.celery --schedule=/tmp/celerybeat-schedule --loglevel=info --pidfile=/tmp/celerybeat.pid
53 changes: 27 additions & 26 deletions fixtures/events/events_by_room_fixtures.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
query_all_events_by_room_with_dates = '''
query{
allEventsByRoom(calendarId:"andela.com_3630363835303531343031@resource.calendar.google.com", startDate:"Jul 10 2018",
allEventsByRoom(calendarId:"andela.com_3630363835303531343031@resource.calendar.google.com",
startDate:"Jul 10 2018",
endDate:"Jul 13 2018" ){
events{
eventTitle
Expand All @@ -10,22 +11,22 @@
}
}
}

'''

query_all_events_by_room_with_dates_response = {
"data": {
"allEventsByRoom": {
"events": [
{
"eventTitle": "Onboarding",
"roomId": 1,
"startTime":"2018-07-11T09:00:00Z",
"endTime":"2018-07-11T09:45:00Z",
"data": {
"allEventsByRoom": {
"events": [
{
"eventTitle": "Onboarding",
"roomId": 1,
"startTime": "2018-07-11T09:00:00Z",
"endTime": "2018-07-11T09:45:00Z",
}
]
}
]
}
}
}

query_all_events_by_room_without_dates = '''
Expand All @@ -39,22 +40,22 @@
}
}
}

'''

query_all_events_by_room_without_dates_response = {
"data": {
"allEventsByRoom": {
"events": [
{
"eventTitle": "Onboarding",
"roomId": 1,
"startTime":"2018-07-11T09:00:00Z",
"endTime":"2018-07-11T09:45:00Z",
"data": {
"allEventsByRoom": {
"events": [
{
"eventTitle": "Onboarding",
"roomId": 1,
"startTime": "2018-07-11T09:00:00Z",
"endTime": "2018-07-11T09:45:00Z",
}
]
}
]
}
}
}

query_all_events_by_room_without_callendar_id = '''
Expand All @@ -68,10 +69,10 @@
}
}
}

'''

query_all_events_by_room_with_invalid_calendar_id = '''
query_all_events_by_room_with_invalid_calendar_id = '''
query{
allEventsByRoom(calendarId:"andela.com_36303638353035313430@resource.calendar.google.com"){
events{
Expand All @@ -82,5 +83,5 @@
}
}
}

'''
25 changes: 25 additions & 0 deletions services/data_deletion/clean_archived_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
from helpers.database import database_uri
from sqlalchemy import create_engine, MetaData, and_
from datetime import timedelta, datetime
import celery


@celery.task(name='clean_archived_data.delete_archived_data')
def delete_archived_data():
    """Purge rows that have been archived for more than 30 days.

    Reflects every table in the configured database and, for each table
    that carries the soft-delete bookkeeping columns (``date_updated``
    and ``state``), deletes the rows whose ``state`` is ``'archived'``
    and whose ``date_updated`` is older than 30 days. Tables are visited
    in reverse dependency order so child tables are cleaned before the
    parents they reference.
    """
    database_engine = create_engine(database_uri)
    metadata = MetaData()
    metadata.reflect(bind=database_engine)

    # Loop-invariant: compute the retention cutoff once, not per table.
    cutoff = datetime.now() - timedelta(days=30)

    for table in reversed(metadata.sorted_tables):
        # Skip tables that lack the bookkeeping columns, instead of
        # using AttributeError for control flow (which could also mask
        # unrelated attribute errors raised while building the query).
        if 'date_updated' not in table.c or 'state' not in table.c:
            continue
        statement = table.delete().where(
            and_(table.c.date_updated < cutoff,
                 table.c.state == 'archived'))
        database_engine.execute(statement)
Loading