Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implement pagination for Forms stream #260

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# Changelog

## 3.1.0
* Implement pagination for Forms API endpoint [#260](https://github.com/singer-io/tap-hubspot/pull/260)

## 3.0.0
* Upgrade Owners API endpoint [#256](https://github.com/singer-io/tap-hubspot/pull/256)

Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from setuptools import setup

setup(name='tap-hubspot',
version='3.0.0',
version='3.1.0',
description='Singer.io tap for extracting data from the HubSpot API',
author='Stitch',
url='http://singer.io',
Expand Down
30 changes: 21 additions & 9 deletions tap_hubspot/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -995,20 +995,32 @@ def sync_forms(STATE, ctx):

LOGGER.info("sync_forms from %s", start)

data = request(get_url("forms")).json()
time_extracted = utils.now()

with Transformer(UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING) as bumble_bee:
# To handle records updated between start of the table sync and the end,
# store the current sync start in the state and not move the bookmark past this value.
sync_start_time = utils.now()
for row in data:
record = bumble_bee.transform(lift_properties_and_versions(row), schema, mdata)

if record[bookmark_key] >= start:
singer.write_record("forms", record, catalog.get('stream_alias'), time_extracted=time_extracted)
if record[bookmark_key] >= max_bk_value:
max_bk_value = record[bookmark_key]
# Check for pagination
params = {'limit': 100, 'offset': 0}
while True:
LOGGER.info("Limit: %d, Offset: %d", params['limit'], params['offset'])
        # Request the next page of forms using limit/offset query params
data = request(get_url("forms"), params).json()
time_extracted = utils.now()

for row in data:
record = bumble_bee.transform(lift_properties_and_versions(row), schema, mdata)

if record[bookmark_key] >= start:
singer.write_record("forms", record, catalog.get('stream_alias'), time_extracted=time_extracted)
if record[bookmark_key] >= max_bk_value:
max_bk_value = record[bookmark_key]

            # Stop pagination if the paginated response returns fewer records than the page limit
if len(data) < params['limit']:
break

params['offset'] += 100

# Don't bookmark past the start of this sync to account for updated records during the sync.
new_bookmark = min(utils.strptime_to_utc(max_bk_value), sync_start_time)
Expand Down
1 change: 1 addition & 0 deletions tests/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ def expected_metadata(self): # DOCS_BUG https://stitchdata.atlassian.net/browse
self.PRIMARY_KEYS: {"guid"},
self.REPLICATION_METHOD: self.INCREMENTAL,
self.REPLICATION_KEYS: {"updatedAt"},
self.EXPECTED_PAGE_SIZE: 100,
self.OBEYS_START_DATE: True
},
"owners": {
Expand Down
1 change: 1 addition & 0 deletions tests/base_hubspot.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ def expected_metadata(cls): # DOCS_BUG https://stitchdata.atlassian.net/browse/
BaseCase.PRIMARY_KEYS: {"guid"},
BaseCase.REPLICATION_METHOD: BaseCase.INCREMENTAL,
BaseCase.REPLICATION_KEYS: {"updatedAt"},
BaseCase.API_LIMIT: 100,
BaseCase.OBEYS_START_DATE: True
},
"owners": {
Expand Down