Skip to content
This repository has been archived by the owner on Apr 4, 2023. It is now read-only.

Commit

Permalink
Merge pull request #58 from cmc333333/delete-commands
Browse files Browse the repository at this point in the history
Respond to DELETE requests
  • Loading branch information
tadhg-ohiggins authored Jul 15, 2016
2 parents 8284953 + af55411 commit dd846a8
Show file tree
Hide file tree
Showing 15 changed files with 170 additions and 83 deletions.
27 changes: 18 additions & 9 deletions regcore/db/django_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,14 +93,17 @@ def _transform(self, reg, doc_type, version=None):
root=(len(reg['label']) == 1),
)

def bulk_put(self, regs, doc_type, root_label, version):
"""Store all reg objects"""
def bulk_delete(self, doc_type, root_label, version):
    """Delete all documents that match these params"""
    # Subparts are deliberately not handled here (yet).
    matching = Document.objects.filter(
        doc_type=doc_type,
        version=version,
        label_string__startswith=root_label,
    )
    matching.delete()

def bulk_insert(self, regs, doc_type, version):
"""Store all document objects"""
treeify(regs[0], Document.objects._get_next_tree_id())
Document.objects.bulk_create(
[self._transform(r, doc_type, version) for r in regs],
Expand Down Expand Up @@ -130,12 +133,15 @@ def _transform(self, layer, layer_name, doc_type):
return Layer(name=layer_name, layer=layer, doc_type=doc_type,
doc_id=doc_id)

def bulk_put(self, layers, layer_name, doc_type, root_doc_id):
"""Store all layer objects"""
def bulk_delete(self, layer_name, doc_type, root_doc_id):
    """Delete all layer data matching the parameters.

    Matches the root doc id exactly, or any descendant of it (doc ids
    use "-" to separate label segments, e.g. "verver/111-22"), so that
    deleting root "22" can no longer clobber the unrelated "222-11".
    """
    # This does not handle subparts; ignoring that for now
    from django.db.models import Q
    is_root_or_descendant = (Q(doc_id=root_doc_id) |
                             Q(doc_id__startswith=root_doc_id + '-'))
    Layer.objects.filter(is_root_or_descendant, name=layer_name,
                         doc_type=doc_type).delete()

def bulk_insert(self, layers, layer_name, doc_type):
    """Store all layer objects"""
    # Convert each incoming dict into a model, then write them in batches.
    models = [self._transform(layer, layer_name, doc_type)
              for layer in layers]
    Layer.objects.bulk_create(models, batch_size=settings.BATCH_SIZE)
Expand All @@ -152,10 +158,11 @@ def get(self, name, doc_type, doc_id):

class DMNotices(interface.Notices):
"""Implementation of Django-models as notice backend"""
def put(self, doc_number, notice):
"""Store a single notice"""
def delete(self, doc_number):
    """Delete the notice with this document number, if one exists."""
    Notice.objects.filter(document_number=doc_number).delete()

def insert(self, doc_number, notice):
"""Store a single notice"""
model = Notice(document_number=doc_number,
fr_url=notice['fr_url'],
publication_date=notice['publication_date'],
Expand Down Expand Up @@ -192,13 +199,15 @@ def listing(self, part=None):

class DMDiffs(interface.Diffs):
"""Implementation of Django-models as diff backend"""
def put(self, label, old_version, new_version, diff):
def insert(self, label, old_version, new_version, diff):
    """Store a diff between two versions of a regulation node"""
    # Reuse delete() rather than duplicating its filter logic, so that
    # insert acts as an idempotent replace for this label/version pair.
    self.delete(label, old_version, new_version)
    Diff(label=label, old_version=old_version, new_version=new_version,
         diff=diff).save()

def delete(self, label, old_version, new_version):
    """Remove the stored diff (if any) for this label/version pair."""
    existing = Diff.objects.filter(
        label=label,
        old_version=old_version,
        new_version=new_version,
    )
    existing.delete()

def get(self, label, old_version, new_version):
"""Find the associated diff"""
try:
Expand Down
21 changes: 15 additions & 6 deletions regcore/db/es.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""Each of the data structures relevant to the API (regulations, notices,
etc.), implemented using Elastic Search as a data store"""
import logging

from django.conf import settings
from pyelasticsearch import ElasticSearch
Expand All @@ -8,6 +9,9 @@
from regcore.db import interface


logger = logging.getLogger(__name__)


def sanitize_doc_id(doc_id):
"""Not strictly required, but remove slashes from Elastic Search ids"""
return ':'.join(doc_id.split('/'))
Expand All @@ -27,6 +31,12 @@ def safe_fetch(self, doc_type, id):
except ElasticHttpNotFoundError:
return None

def bulk_delete(self, *args, **kwarg):
    """No-op: bulk deletes are intentionally unsupported here."""
    logger.warning("Elastic Search backend doesn't handle deletes")

def delete(self, *args, **kwarg):
    """No-op: single-record deletes are intentionally unsupported here."""
    logger.warning("Elastic Search backend doesn't handle deletes")


class ESDocuments(ESBase, interface.Documents):
"""Implementation of Elastic Search as regulations backend"""
Expand Down Expand Up @@ -55,7 +65,7 @@ def _transform(self, reg, doc_type, version):
)
return node

def bulk_put(self, regs, doc_type, root_label, version):
def bulk_insert(self, regs, doc_type, version):
"""Store all reg objects"""
self.es.bulk_index(
settings.ELASTIC_SEARCH_INDEX, 'reg_tree',
Expand Down Expand Up @@ -84,9 +94,8 @@ def _transform(self, layer, layer_name, doc_type):
doc_id = sanitize_doc_id(layer.pop('doc_id'))
return {'id': ':'.join([layer_name, doc_type, doc_id]), 'layer': layer}

def bulk_put(self, layers, layer_name, doc_type, root_doc_id):
"""Store all layer objects. Note this does not delete existing docs;
it only replaces/inserts docs, which has loop holes"""
def bulk_insert(self, layers, layer_name, doc_type):
    """Store all layer objects."""
    # Transform every layer dict up front, then index them in one call.
    docs = [self._transform(layer, layer_name, doc_type)
            for layer in layers]
    self.es.bulk_index(settings.ELASTIC_SEARCH_INDEX, 'layer', docs)
Expand All @@ -101,7 +110,7 @@ def get(self, name, doc_type, doc_id):

class ESNotices(ESBase, interface.Notices):
"""Implementation of Elastic Search as notice backend"""
def put(self, doc_number, notice):
def insert(self, doc_number, notice):
    """Store a single notice"""
    index_name = settings.ELASTIC_SEARCH_INDEX
    self.es.index(index_name, 'notice', notice, id=doc_number)
Expand Down Expand Up @@ -133,7 +142,7 @@ class ESDiffs(ESBase, interface.Diffs):
def to_id(label, old, new):
    """Build the Elastic Search document id for a diff record."""
    return "{}/{}/{}".format(label, old, new)

def put(self, label, old_version, new_version, diff):
def insert(self, label, old_version, new_version, diff):
"""Store a diff between two versions of a regulation node"""
struct = {
'label': label,
Expand Down
38 changes: 29 additions & 9 deletions regcore/db/interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,13 @@ def get(self, doc_type, label, version):
raise NotImplementedError

@abc.abstractmethod
def bulk_put(self, regs, doc_type, root_label, version):
"""Add many entries, with a root of root_label. Each should have the
provided version"""
def bulk_delete(self, doc_type, root_label, version):
    """Delete all documents that match these parameters.
    :param str doc_type: document type to delete within
    :param str root_label: delete documents rooted at this label
    :param str version: only delete documents with this version"""
    raise NotImplementedError

@abc.abstractmethod
def bulk_insert(self, regs, doc_type, version):
    """Add many entries, each with the provided version.
    :param list[dict] regs: the document entries to store
    :param str doc_type: document type to store them under
    :param str version: version assigned to every entry"""
    raise NotImplementedError

@abc.abstractmethod
Expand All @@ -26,15 +30,21 @@ def listing(self, doc_type, label=None):

@six.add_metaclass(abc.ABCMeta)
class Layers(object):
def bulk_put(self, layers, layer_name, doc_type, root_doc_id):
def bulk_delete(self, layer_name, doc_type, root_doc_id):
    """Deletes multiple entries with the same layer_name.
    :param str layer_name: Identifier for this layer, e.g. "toc",
        "internal-citations", etc.
    :param str doc_type: layers are keyed by doc_type
    :param str root_doc_id: the doc id of the "root" layer; backends may
        also remove layers for documents beneath this root"""
    raise NotImplementedError

def bulk_insert(self, layers, layer_name, doc_type):
    """Add multiple entries with the same layer_name.
    :param list[dict] layers: Each dictionary represents a layer; each
        should have a distinct "doc_id", which will be used during
        insertion.
    :param str layer_name: Identifier for this layer, e.g. "toc",
        "internal-citations", etc.
    :param str doc_type: layers are keyed by doc_type"""
    raise NotImplementedError

def get(self, name, doc_type, doc_id):
Expand All @@ -44,7 +54,11 @@ def get(self, name, doc_type, doc_id):

@six.add_metaclass(abc.ABCMeta)
class Notices(object):
def put(self, doc_number, notice):
def delete(self, doc_number):
    """Delete the notice with this document number.
    :param str doc_number: identifier of the notice to remove"""
    raise NotImplementedError

def insert(self, doc_number, notice):
    """Store a single notice.
    :param str doc_number: identifier to store the notice under
    :param dict notice: the notice payload"""
    raise NotImplementedError
Expand All @@ -60,7 +74,13 @@ def listing(self, part=None):

@six.add_metaclass(abc.ABCMeta)
class Diffs(object):
def put(self, label, old_version, new_version, diff):
def delete(self, label, old_version, new_version):
    """Delete the diff stored for this node between two versions.
    :param str label: identifier of the regulation node
    :param str old_version: version the diff starts from
    :param str new_version: version the diff ends at"""
    raise NotImplementedError

def insert(self, label, old_version, new_version, diff):
""":param str label:
:param str old_version:
:param str new_version:
Expand Down
28 changes: 16 additions & 12 deletions regcore/tests/db_django_models_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def test_listing(self):
results = self.dmr.listing('cfr')
self.assertEqual([('ver1', '1111'), ('ver2', '1111')], results)

def test_bulk_put(self):
def test_bulk_insert(self):
"""Writing multiple documents should save correctly. They can be
modified. The lft and rght ids assigned by the Modified Preorder Tree
Traversal algorithm are shown below:
Expand All @@ -90,11 +90,12 @@ def test_bulk_put(self):
n2['parent'] = root
n3['parent'] = root
nodes = [root, n2, n3]
self.dmr.bulk_put(nodes, 'cfr', '111', 'verver')
self.dmr.bulk_insert(nodes, 'cfr', 'verver')
self.assertEqual(DMDocuments().get('cfr', '111', 'verver'), original)

root['title'] = original['title'] = 'New Title'
self.dmr.bulk_put(nodes, 'cfr', '111', 'verver')
self.dmr.bulk_delete('cfr', '111', 'verver')
self.dmr.bulk_insert(nodes, 'cfr', 'verver')

self.assertEqual(DMDocuments().get('cfr', '111', 'verver'), original)

Expand All @@ -113,12 +114,12 @@ def test_get_success(self):
self.assertEqual({"some": 'body'},
self.dml.get('namnam', 'cfr', 'verver/lablab'))

def test_bulk_put(self):
def test_bulk_insert(self):
"""Writing multiple documents should save correctly. They can be
modified"""
layers = [{'111-22': [], '111-22-a': [], 'doc_id': 'verver/111-22'},
{'111-23': [], 'doc_id': 'verver/111-23'}]
self.dml.bulk_put(layers, 'name', 'cfr', 'verver/111')
self.dml.bulk_insert(layers, 'name', 'cfr')

self.assertEqual(Layer.objects.count(), 2)
self.assertEqual(self.dml.get('name', 'cfr', 'verver/111-22'),
Expand All @@ -127,7 +128,8 @@ def test_bulk_put(self):
{'111-23': []})

layers[1] = {'111-23': [1], 'doc_id': 'verver/111-23'}
self.dml.bulk_put(layers, 'name', 'cfr', 'verver/111')
self.dml.bulk_delete('name', 'cfr', 'verver/111')
self.dml.bulk_insert(layers, 'name', 'cfr')

self.assertEqual(Layer.objects.count(), 2)
self.assertEqual(self.dml.get('name', 'cfr', 'verver/111-23'),
Expand Down Expand Up @@ -169,14 +171,14 @@ def test_listing(self):
self.assertEqual(self.dmn.listing(), self.dmn.listing('876'))
self.assertEqual([], self.dmn.listing('888'))

def test_put(self):
def test_insert(self):
"""We can insert and replace a notice"""
doc = {"some": "structure",
'effective_on': '2011-01-01',
'fr_url': 'http://example.com',
'publication_date': '2010-02-02',
'cfr_parts': ['222']}
self.dmn.put('docdoc', doc)
self.dmn.insert('docdoc', doc)

expected = {"document_number": "docdoc",
"effective_on": date(2011, 1, 1),
Expand All @@ -189,7 +191,8 @@ def test_put(self):
[expected])

doc['fr_url'] = 'url2'
self.dmn.put('docdoc', doc)
self.dmn.delete('docdoc')
self.dmn.insert('docdoc', doc)

expected['fr_url'] = 'url2'
six.assertCountEqual(self, Notice.objects.all().values(*fields),
Expand All @@ -210,17 +213,18 @@ def test_get_success(self):
self.assertEqual({"some": 'body'},
self.dmd.get('lablab', 'oldold', 'newnew'))

def test_put(self):
def test_insert_delete(self):
"""We can insert and replace a diff"""
self.dmd.put('lablab', 'oldold', 'newnew', {"some": "structure"})
self.dmd.insert('lablab', 'oldold', 'newnew', {"some": "structure"})

expected = {"label": "lablab", "old_version": "oldold",
"new_version": "newnew", "diff": {"some": "structure"}}
fields = expected.keys()
six.assertCountEqual(self, Diff.objects.all().values(*fields),
[expected])

self.dmd.put('lablab', 'oldold', 'newnew', {"other": "structure"})
self.dmd.delete('lablab', 'oldold', 'newnew')
self.dmd.insert('lablab', 'oldold', 'newnew', {"other": "structure"})
expected['diff'] = {'other': 'structure'}
six.assertCountEqual(self, Diff.objects.all().values(*fields),
[expected])
Loading

0 comments on commit dd846a8

Please sign in to comment.