Importcontent manifest file v015 #25

Draft
wants to merge 8 commits into
base: release-v0.15.x
Differentiate between node_ids as None, and as []
When node_ids is set to an empty list, importcontent will understand
that to mean "no nodes", which is different from when node_ids is unset
(None), in which case all nodes are selected.
dylanmccall authored and danigm committed Jun 30, 2022
commit 5285c161d8a1cf55d4d8ad5bd2c4dcabc70f4010
4 changes: 2 additions & 2 deletions kolibri/core/content/management/commands/importcontent.py
@@ -97,7 +97,7 @@ def add_arguments(self, parser):
"--node_ids",
"-n",
# Split the comma separated string we get, into a list of strings
-type=lambda x: x.split(","),
+type=lambda x: x.split(",") if x else [],
default=None,
required=False,
dest="node_ids",
@@ -114,7 +114,7 @@
parser.add_argument(
"--exclude_node_ids",
# Split the comma separated string we get, into a list of strings
-type=lambda x: x.split(","),
+type=lambda x: x.split(",") if x else [],
default=None,
required=False,
dest="exclude_node_ids",
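The core of this change is the new type callable on --node_ids and --exclude_node_ids. A minimal, standalone argparse sketch (not the real importcontent command, just the changed type= and default=None taken from the hunks above) shows the three cases the commit message distinguishes:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--node_ids",
    "-n",
    # An empty string now becomes an empty list rather than [""]
    type=lambda x: x.split(",") if x else [],
    default=None,
    required=False,
    dest="node_ids",
)

print(parser.parse_args([]).node_ids)                 # None -> option unset, select all nodes
print(parser.parse_args(["-n", ""]).node_ids)         # []   -> explicitly empty, select no nodes
print(parser.parse_args(["-n", "abc,def"]).node_ids)  # ['abc', 'def']
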
92 changes: 83 additions & 9 deletions kolibri/core/content/test/test_import_export.py
@@ -134,7 +134,31 @@ def test_with_node_ids(self):
available=True,
)
.filter(Q(parent=self.c2_node_id) | Q(pk=self.c1_node_id))
.exclude(Q(pk=self.c2c3_node_id))
.exclude(kind=content_kinds.TOPIC)
)

matched_nodes_queries_list = get_import_export_nodes(
self.the_channel_id,
renderable_only=False,
node_ids=[
self.c2_node_id,
self.c1_node_id,
],
)

self.assertCountEqual(
itertools.chain.from_iterable(matched_nodes_queries_list),
expected_content_nodes,
)

def test_with_node_ids_and_exclude_node_ids(self):
expected_content_nodes = list(
ContentNode.objects.filter(
channel_id=self.the_channel_id,
available=True,
)
.filter(Q(parent=self.c2_node_id) | Q(pk=self.c1_node_id))
.exclude(pk=self.c2c3_node_id)
.exclude(kind=content_kinds.TOPIC)
)

@@ -153,6 +177,56 @@ def test_with_node_ids(self):
expected_content_nodes,
)

def test_with_node_ids_equals_exclude_node_ids(self):
expected_content_nodes = list()

matched_nodes_queries_list = get_import_export_nodes(
self.the_channel_id,
renderable_only=False,
node_ids=[self.c1_node_id],
exclude_node_ids=[self.c1_node_id],
)

self.assertCountEqual(
itertools.chain.from_iterable(matched_nodes_queries_list),
expected_content_nodes,
)

def test_with_node_ids_none(self):
expected_content_nodes = list(
ContentNode.objects.filter(
channel_id=self.the_channel_id,
available=True,
).exclude(kind=content_kinds.TOPIC)
)

matched_nodes_queries_list = get_import_export_nodes(
self.the_channel_id,
renderable_only=False,
node_ids=None,
exclude_node_ids=None,
)

self.assertCountEqual(
itertools.chain.from_iterable(matched_nodes_queries_list),
expected_content_nodes,
)

def test_with_node_ids_empty(self):
expected_content_nodes = list()

matched_nodes_queries_list = get_import_export_nodes(
self.the_channel_id,
renderable_only=False,
node_ids=[],
exclude_node_ids=None,
)

self.assertCountEqual(
itertools.chain.from_iterable(matched_nodes_queries_list),
expected_content_nodes,
)

@patch(
"kolibri.core.content.utils.import_export_content.get_channel_stats_from_disk"
)
@@ -1709,7 +1783,7 @@ def test_all_nodes_present_disk_renderable_only(self, channel_stats_mock):
}
channel_stats_mock.return_value = stats
_, files_to_transfer, _ = get_import_export_data(
-self.the_channel_id, [], [], False, renderable_only=True, drive_id="1"
+self.the_channel_id, None, None, False, renderable_only=True, drive_id="1"
)
self.assertEqual(
len(files_to_transfer),
@@ -1732,7 +1806,7 @@ def test_all_nodes_present_disk(self, channel_stats_mock):
}
channel_stats_mock.return_value = stats
_, files_to_transfer, _ = get_import_export_data(
-self.the_channel_id, [], [], False, renderable_only=False, drive_id="1"
+self.the_channel_id, None, None, False, renderable_only=False, drive_id="1"
)
self.assertEqual(
len(files_to_transfer), LocalFile.objects.filter(available=False).count()
@@ -1748,7 +1822,7 @@ def test_one_node_present_disk(self, channel_stats_mock):
stats = {obj.id: {}}
channel_stats_mock.return_value = stats
_, files_to_transfer, _ = get_import_export_data(
-self.the_channel_id, [], [], False, renderable_only=False, drive_id="1"
+self.the_channel_id, None, None, False, renderable_only=False, drive_id="1"
)
self.assertEqual(len(files_to_transfer), obj.files.count())

@@ -1765,7 +1839,7 @@ def test_include_one_available_nodes_disk(self, channel_stats_mock):
_, files_to_transfer, _ = get_import_export_data(
self.the_channel_id,
[parent.id],
-[],
+None,
False,
renderable_only=False,
drive_id="1",
@@ -1781,7 +1855,7 @@ def test_no_nodes_present_disk(self, channel_stats_mock):
stats = {}
channel_stats_mock.return_value = stats
_, files_to_transfer, _ = get_import_export_data(
-self.the_channel_id, [], [], False, renderable_only=False, drive_id="1"
+self.the_channel_id, None, None, False, renderable_only=False, drive_id="1"
)
self.assertEqual(len(files_to_transfer), 0)

@@ -1796,7 +1870,7 @@ def test_all_nodes_present_peer(self, channel_stats_mock):
}
channel_stats_mock.return_value = stats
_, files_to_transfer, _ = get_import_export_data(
-self.the_channel_id, [], [], False, renderable_only=False, peer_id="1"
+self.the_channel_id, None, None, False, renderable_only=False, peer_id="1"
)
self.assertEqual(
len(files_to_transfer), LocalFile.objects.filter(available=False).count()
@@ -1812,7 +1886,7 @@ def test_one_node_present_peer(self, channel_stats_mock):
stats = {obj.id: {}}
channel_stats_mock.return_value = stats
_, files_to_transfer, _ = get_import_export_data(
-self.the_channel_id, [], [], False, renderable_only=False, peer_id="1"
+self.the_channel_id, None, None, False, renderable_only=False, peer_id="1"
)
self.assertEqual(len(files_to_transfer), obj.files.count())

@@ -1825,7 +1899,7 @@ def test_no_nodes_present_peer(self, channel_stats_mock):
stats = {}
channel_stats_mock.return_value = stats
_, files_to_transfer, _ = get_import_export_data(
-self.the_channel_id, [], [], False, renderable_only=False, peer_id="1"
+self.the_channel_id, None, None, False, renderable_only=False, peer_id="1"
)
self.assertEqual(len(files_to_transfer), 0)

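For reference, the assertion pattern used by the new tests (flattening the list of per-chunk node queries with itertools.chain.from_iterable before an order-insensitive comparison) can be reproduced with plain lists standing in for the querysets returned by get_import_export_nodes; the ids below are hypothetical, not fixtures from the Kolibri test suite:

import itertools
import unittest


class ChainComparisonSketch(unittest.TestCase):
    def test_flatten_and_compare(self):
        # Stand-ins for the list of queries returned by get_import_export_nodes
        matched_nodes_queries_list = [["c1"], ["c2c1", "c2c2"]]
        expected_content_nodes = ["c2c2", "c1", "c2c1"]

        # Flatten the per-query results, then compare ignoring order,
        # exactly like the tests above do.
        self.assertCountEqual(
            itertools.chain.from_iterable(matched_nodes_queries_list),
            expected_content_nodes,
        )


if __name__ == "__main__":
    unittest.main()
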
8 changes: 6 additions & 2 deletions kolibri/core/content/utils/import_export_content.py
@@ -165,6 +165,10 @@ def get_import_export_nodes( # noqa: C901
Returns a list of queries for ContentNode objects matching the given
constraints. This can be used with get_content_nodes_data and with
get_content_nodes_selectors.

There is a distinction between calling this function with node_ids=[] and
with node_ids=None. With an empty list, no nodes will be selected. With a
value of None, all nodes will be selected.
"""

nodes_queries_list = []
@@ -192,7 +196,7 @@ def get_import_export_nodes( # noqa: C901
nodes_query = nodes_to_include

# if requested, filter down to only include particular topics/nodes
-if node_ids:
+if node_ids is not None:
nodes_query = nodes_query.filter_by_uuids(
_mptt_descendant_ids(
channel_id, node_ids, min_boundary, min_boundary + dynamic_chunksize
@@ -204,7 +208,7 @@
nodes_query = nodes_query.filter(renderable_contentnodes_q_filter)

# filter down the query to remove files associated with nodes we've specifically been asked to exclude
-if exclude_node_ids:
+if exclude_node_ids is not None:
nodes_query = nodes_query.order_by().exclude_by_uuids(
_mptt_descendant_ids(
channel_id,
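Taken together with the docstring above, the "is not None" checks give get_import_export_nodes three behaviours. A simplified, framework-free sketch of that selection logic, using plain sets of ids instead of Django querysets with filter_by_uuids/exclude_by_uuids (the helper name and the ids are illustrative only, not part of the Kolibri code):

def select_node_ids(all_node_ids, node_ids=None, exclude_node_ids=None):
    selected = set(all_node_ids)

    # node_ids=None means "no filter" (all nodes); node_ids=[] means
    # "no nodes", since intersecting with an empty set keeps nothing.
    if node_ids is not None:
        selected &= set(node_ids)

    # Likewise for exclusions: None excludes nothing, while an explicit
    # list removes exactly those ids.
    if exclude_node_ids is not None:
        selected -= set(exclude_node_ids)

    return selected


all_ids = {"c1", "c2", "c2c1"}
assert select_node_ids(all_ids) == all_ids              # node_ids unset: all nodes
assert select_node_ids(all_ids, node_ids=[]) == set()   # empty list: no nodes
assert select_node_ids(all_ids, node_ids=["c1"], exclude_node_ids=["c1"]) == set()  # mirrors test_with_node_ids_equals_exclude_node_ids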