From 82c25750abd57c52b93d45caf300c18d08767669 Mon Sep 17 00:00:00 2001 From: Gurov Ilya Date: Fri, 26 Jul 2019 19:23:13 +0300 Subject: [PATCH] Separate row types to remove confusion around return types of 'row.commit'. (#8662) --- bigtable/docs/snippets_table.py | 119 ++++++++++++++++++++---- bigtable/google/cloud/bigtable/table.py | 62 ++++++++++++ bigtable/tests/unit/test_table.py | 69 ++++++++------ 3 files changed, 206 insertions(+), 44 deletions(-) diff --git a/bigtable/docs/snippets_table.py b/bigtable/docs/snippets_table.py index e52e9469143d..0fbb16bf74ad 100644 --- a/bigtable/docs/snippets_table.py +++ b/bigtable/docs/snippets_table.py @@ -403,11 +403,94 @@ def test_bigtable_table_row(): row2_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1) row2_obj.commit() - actual_rows_keys = [] + written_row_keys = [] for row in table.read_rows(): - actual_rows_keys.append(row.row_key) + written_row_keys.append(row.row_key) - assert actual_rows_keys == row_keys + assert written_row_keys == row_keys + + table.truncate(timeout=300) + + +def test_bigtable_table_append_row(): + # [START bigtable_table_append_row] + from google.cloud.bigtable import Client + + client = Client(admin=True) + instance = client.instance(INSTANCE_ID) + table = instance.table(TABLE_ID) + + row_keys = [b"row_key_1", b"row_key_2"] + row1_obj = table.append_row(row_keys[0]) + row2_obj = table.append_row(row_keys[1]) + # [END bigtable_table_append_row] + + row1_obj.append_cell_value(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1) + row1_obj.commit() + row2_obj.append_cell_value(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1) + row2_obj.commit() + + written_row_keys = [] + for row in table.read_rows(): + written_row_keys.append(row.row_key) + + assert written_row_keys == row_keys + + table.truncate(timeout=300) + + +def test_bigtable_table_direct_row(): + # [START bigtable_table_direct_row] + from google.cloud.bigtable import Client + + client = Client(admin=True) + instance = client.instance(INSTANCE_ID) + table = instance.table(TABLE_ID) + + row_keys = [b"row_key_1", b"row_key_2"] + row1_obj = table.direct_row(row_keys[0]) + row2_obj = table.direct_row(row_keys[1]) + # [END bigtable_table_direct_row] + + row1_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1) + row1_obj.commit() + row2_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1) + row2_obj.commit() + + written_row_keys = [] + for row in table.read_rows(): + written_row_keys.append(row.row_key) + + assert written_row_keys == row_keys + + table.truncate(timeout=300) + + +def test_bigtable_table_conditional_row(): + # [START bigtable_table_conditional_row] + from google.cloud.bigtable import Client + from google.cloud.bigtable.row_filters import PassAllFilter + + client = Client(admin=True) + instance = client.instance(INSTANCE_ID) + table = instance.table(TABLE_ID) + + row_keys = [b"row_key_1", b"row_key_2"] + filter_ = PassAllFilter(True) + row1_obj = table.conditional_row(row_keys[0], filter_=filter_) + row2_obj = table.conditional_row(row_keys[1], filter_=filter_) + # [END bigtable_table_conditional_row] + + row1_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1, state=False) + row1_obj.commit() + row2_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1, state=False) + row2_obj.commit() + + written_row_keys = [] + for row in table.read_rows(): + written_row_keys.append(row.row_key) + + assert written_row_keys == row_keys table.truncate(timeout=300) @@ -910,10 +993,10 @@ def test_bigtable_row_delete(): row_obj = table_row_del.row(b"row_key_1") 
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, b"cell-val") row_obj.commit() - actual_rows_keys = [] + written_row_keys = [] for row in table_row_del.read_rows(): - actual_rows_keys.append(row.row_key) - assert actual_rows_keys == [b"row_key_1"] + written_row_keys.append(row.row_key) + assert written_row_keys == [b"row_key_1"] # [START bigtable_row_delete] from google.cloud.bigtable import Client @@ -929,10 +1012,10 @@ def test_bigtable_row_delete(): row_obj.commit() # [END bigtable_row_delete] - actual_rows_keys = [] + written_row_keys = [] for row in table.read_rows(): - actual_rows_keys.append(row.row_key) - assert len(actual_rows_keys) == 0 + written_row_keys.append(row.row_key) + assert len(written_row_keys) == 0 def test_bigtable_row_delete_cell(): @@ -942,10 +1025,10 @@ def test_bigtable_row_delete_cell(): row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1) row_obj.commit() - actual_rows_keys = [] + written_row_keys = [] for row in table_row_del_cell.read_rows(): - actual_rows_keys.append(row.row_key) - assert actual_rows_keys == [row_key1] + written_row_keys.append(row.row_key) + assert written_row_keys == [row_key1] # [START bigtable_row_delete_cell] from google.cloud.bigtable import Client @@ -975,10 +1058,10 @@ def test_bigtable_row_delete_cells(): row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME2, CELL_VAL2) row_obj.commit() - actual_rows_keys = [] + written_row_keys = [] for row in table_row_del_cells.read_rows(): - actual_rows_keys.append(row.row_key) - assert actual_rows_keys == [row_key1] + written_row_keys.append(row.row_key) + assert written_row_keys == [row_key1] # [START bigtable_row_delete_cells] from google.cloud.bigtable import Client @@ -1075,11 +1158,11 @@ def test_bigtable_row_setcell_commit_rowkey(): row_obj.commit() # [END bigtable_row_commit] - actual_rows_keys = [] + written_row_keys = [] for row in table.read_rows(): - actual_rows_keys.append(row.row_key) + written_row_keys.append(row.row_key) - assert actual_rows_keys == [b"row_key_1", b"row_key_2"] + assert written_row_keys == [b"row_key_1", b"row_key_2"] # [START bigtable_row_row_key] from google.cloud.bigtable import Client diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 8a58cd8b6632..4ced9fbde0c2 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -190,6 +190,14 @@ def row(self, row_key, filter_=None, append=False): :raises: :class:`ValueError ` if both ``filter_`` and ``append`` are used. """ + warnings.warn( + "This method will be deprecated in future versions. Please " + "use Table.append_row(), Table.conditional_row() " + "and Table.direct_row() methods instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + if append and filter_ is not None: raise ValueError("At most one of filter_ and append can be set") if append: @@ -199,6 +207,60 @@ def row(self, row_key, filter_=None, append=False): else: return DirectRow(row_key, self) + def append_row(self, row_key): + """Create a :class:`~google.cloud.bigtable.row.AppendRow` associated with this table. + + For example: + + .. literalinclude:: snippets_table.py + :start-after: [START bigtable_table_append_row] + :end-before: [END bigtable_table_append_row] + + Args: + row_key (bytes): The key for the row being created. + + Returns: + A row owned by this table. + """ + return AppendRow(row_key, self) + + def direct_row(self, row_key): + """Create a :class:`~google.cloud.bigtable.row.DirectRow` associated with this table. + + For example: + + .. 
literalinclude:: snippets_table.py + :start-after: [START bigtable_table_direct_row] + :end-before: [END bigtable_table_direct_row] + + Args: + row_key (bytes): The key for the row being created. + + Returns: + A row owned by this table. + """ + return DirectRow(row_key, self) + + def conditional_row(self, row_key, filter_): + """Create a :class:`~google.cloud.bigtable.row.ConditionalRow` associated with this table. + + For example: + + .. literalinclude:: snippets_table.py + :start-after: [START bigtable_table_conditional_row] + :end-before: [END bigtable_table_conditional_row] + + Args: + row_key (bytes): The key for the row being created. + + filter_ (:class:`.RowFilter`): (Optional) Filter to be used for + conditional mutations. See :class:`.ConditionalRow` for more details. + + Returns: + A row owned by this table. + """ + return ConditionalRow(row_key, self, filter_=filter_) + def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 25f468c730fe..495d8660d1f7 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -180,16 +180,19 @@ def test_constructor_wo_admin(self): self.assertIs(table._instance._client, client) self.assertEqual(table.name, self.TABLE_NAME) - def test_row_factory_direct(self): - from google.cloud.bigtable.row import DirectRow - - credentials = _make_credentials() + def _row_methods_helper(self): client = self._make_client( - project="project-id", credentials=credentials, admin=True + project="project-id", credentials=_make_credentials(), admin=True ) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) row_key = b"row_key" + return table, row_key + + def test_row_factory_direct(self): + from google.cloud.bigtable.row import DirectRow + + table, row_key = self._row_methods_helper() row = table.row(row_key) self.assertIsInstance(row, DirectRow) @@ -199,13 +202,7 @@ def test_row_factory_direct(self): def test_row_factory_conditional(self): from google.cloud.bigtable.row import ConditionalRow - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - row_key = b"row_key" + table, row_key = self._row_methods_helper() filter_ = object() row = table.row(row_key, filter_=filter_) @@ -216,28 +213,48 @@ def test_row_factory_conditional(self): def test_row_factory_append(self): from google.cloud.bigtable.row import AppendRow - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - row_key = b"row_key" + table, row_key = self._row_methods_helper() row = table.row(row_key, append=True) self.assertIsInstance(row, AppendRow) self.assertEqual(row._row_key, row_key) self.assertEqual(row._table, table) + def test_direct_row(self): + from google.cloud.bigtable.row import DirectRow + + table, row_key = self._row_methods_helper() + row = table.direct_row(row_key) + + self.assertIsInstance(row, DirectRow) + self.assertEqual(row._row_key, row_key) + self.assertEqual(row._table, table) + + def test_conditional_row(self): + from google.cloud.bigtable.row import ConditionalRow + + table, row_key = self._row_methods_helper() + 
+        filter_ = object()
+        row = table.conditional_row(row_key, filter_=filter_)
+
+        self.assertIsInstance(row, ConditionalRow)
+        self.assertEqual(row._row_key, row_key)
+        self.assertEqual(row._table, table)
+
+    def test_append_row(self):
+        from google.cloud.bigtable.row import AppendRow
+
+        table, row_key = self._row_methods_helper()
+        row = table.append_row(row_key)
+
+        self.assertIsInstance(row, AppendRow)
+        self.assertEqual(row._row_key, row_key)
+        self.assertEqual(row._table, table)
+
     def test_row_factory_failure(self):
-        credentials = _make_credentials()
-        client = self._make_client(
-            project="project-id", credentials=credentials, admin=True
-        )
-        instance = client.instance(instance_id=self.INSTANCE_ID)
-        table = self._make_one(self.TABLE_ID, instance)
+        table, row_key = self._row_methods_helper()
         with self.assertRaises(ValueError):
-            table.row(b"row_key", filter_=object(), append=True)
+            table.row(row_key, filter_=object(), append=True)
 
     def test___eq__(self):
         credentials = _make_credentials()
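
Migration note: the following is a minimal usage sketch, not part of the patch, showing how calling code moves from the deprecated Table.row() factory to the explicitly typed factories this change introduces. It assumes a reachable Bigtable instance; the instance ID, table ID, row keys, and column-family name below are placeholders, not names defined by this patch.

    from google.cloud.bigtable import Client
    from google.cloud.bigtable.row_filters import PassAllFilter

    client = Client(admin=True)
    # "my-instance" and "my-table" are placeholder identifiers.
    table = client.instance("my-instance").table("my-table")

    # Before this patch, one factory returned three different row types, so the
    # return type of row.commit() depended on which keyword arguments were set:
    #   table.row(b"k1", append=True)  -> AppendRow      (commit() returns dict)
    #   table.row(b"k2", filter_=...)  -> ConditionalRow (commit() returns bool)
    #   table.row(b"k3")               -> DirectRow      (commit() returns None)

    # After this patch, each factory names the row type it returns:
    append_row = table.append_row(b"k1")
    conditional_row = table.conditional_row(b"k2", filter_=PassAllFilter(True))
    direct_row = table.direct_row(b"k3")

    # "cf1" is a placeholder column family that must already exist on the table.
    direct_row.set_cell("cf1", b"col", b"value")
    direct_row.commit()  # DirectRow.commit() returns None; failures raise.

Splitting the factory this way trades one flexible entry point for three narrowly typed ones, so the commit() contract is visible at the call site, while the PendingDeprecationWarning keeps existing Table.row() callers working during the transition.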