Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

TDL-6737 added pylint and resolved the errors #87

Merged
Show file tree
Hide file tree
Changes from 20 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
1b1fcf6
added pylint and resolved the errors
namrata270998 Oct 7, 2021
7fcd83f
added pylint in the setup.py
namrata270998 Oct 7, 2021
9251bdc
corrected the pylint command
namrata270998 Oct 7, 2021
cb15949
added package name
namrata270998 Oct 7, 2021
0857924
replaced dev with test in pip install
namrata270998 Oct 7, 2021
d1b1482
resolved the comments
namrata270998 Oct 7, 2021
77cbfd6
removed f-string
namrata270998 Oct 18, 2021
2d3d110
Added coverage report to artifact
prijendev Oct 21, 2021
ebe6c38
removed unittest case report
prijendev Oct 21, 2021
d69a23a
removed config file changes
prijendev Oct 21, 2021
0523a2f
revert back config.yml file changes
prijendev Oct 21, 2021
a448d21
Added back pylint in circleci
prijendev Oct 21, 2021
b752958
resolved pylint error
prijendev Oct 21, 2021
c4f87b9
resolved comments
namrata270998 Oct 27, 2021
ea74d52
Merge branch 'TDL-6737-add-pylint-to-tap-tester' of https://github.co…
namrata270998 Oct 27, 2021
27cc866
removed str
namrata270998 Oct 27, 2021
63a813d
removed logger statements
namrata270998 Oct 27, 2021
f3ff26a
Merge branch 'TDL-10048-Improve-BingAds-Reliability' of https://githu…
namrata270998 Nov 8, 2021
77a0eea
added consider-using-f-string in config.yml
namrata270998 Dec 10, 2021
a59b032
resolved comments
namrata270998 Dec 15, 2021
e8c58ff
Merge branch 'TDL-10048-Improve-BingAds-Reliability' of https://githu…
prijendev Dec 22, 2021
5b9d313
resolved pylint errors
prijendev Dec 22, 2021
fd066e9
pylint fixtures
prijendev Dec 22, 2021
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 7 additions & 4 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
version: 2.1
orbs:
slack: circleci/slack@3.4.2

jobs:
build:
docker:
Expand All @@ -14,7 +13,12 @@ jobs:
python3 -mvenv /usr/local/share/virtualenvs/tap-bing-ads
source /usr/local/share/virtualenvs/tap-bing-ads/bin/activate
pip install -U 'pip<19.2' 'setuptools<51.0.0'
pip install .[dev]
pip install .[test]
- run:
name: 'pylint'
command: |
source /usr/local/share/virtualenvs/tap-bing-ads/bin/activate
pylint tap_bing_ads -d missing-docstring,line-too-long,invalid-name,super-with-arguments,return-in-init,too-many-arguments,deprecated-method,consider-using-f-string
- add_ssh_keys
- run:
name: 'Integration Tests'
Expand All @@ -25,7 +29,6 @@ jobs:
run-test --tap=tap-bing-ads tests
- slack/notify-on-failure:
only_for_branches: master

workflows:
version: 2
commit:
Expand All @@ -42,4 +45,4 @@ workflows:
- master
jobs:
- build:
context: circleci-user
context: circleci-user
3 changes: 3 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,9 @@
'backoff==1.8.0',
],
extras_require={
'test': [
'pylint'
],
'dev': [
'ipdb'
]
Expand Down
90 changes: 41 additions & 49 deletions tap_bing_ads/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,13 +61,12 @@ class InvalidDateRangeEnd(Exception):
pass

def log_service_call(service_method, account_id):
def wrapper(*args, **kwargs):
log_args = list(map(lambda arg: str(arg).replace('\n', '\\n'), args)) + \
list(map(lambda kv: '{}={}'.format(*kv), kwargs.items()))
LOGGER.info('Calling: {}({}) for account: {}'.format(
service_method.name,
','.join(log_args),
account_id))
def wrapper(*args, **kwargs): # pylint: disable=inconsistent-return-statements
log_args = list(map(lambda arg: str(arg).replace('\n', '\\n'), args)) + list(map(lambda kv: '{}={}'.format(*kv), kwargs.items()))
LOGGER.info('Calling: %s(%s) for account: %s',
service_method.name,
','.join(log_args),
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What is the difference between what was before and the changes done?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We have removed format and used %s instead, as pylint was throwing an error for format

account_id)
with metrics.http_request_timer(service_method.name):
try:
return service_method(*args, **kwargs)
Expand All @@ -79,7 +78,7 @@ def wrapper(*args, **kwargs):
if oe.ErrorCode == 'InvalidCustomDateRangeEnd']
if any(invalid_date_range_end_errors):
raise InvalidDateRangeEnd(invalid_date_range_end_errors) from e
LOGGER.info('Caught exception for account: {}'.format(account_id))
LOGGER.info('Caught exception for account: %s', account_id)
raise Exception(operation_errors) from e
if hasattr(e.fault.detail, 'AdApiFaultDetail'):
raise Exception(e.fault.detail.AdApiFaultDetail.Errors) from e
Expand Down Expand Up @@ -204,7 +203,7 @@ def get_array_type(array_type):

def get_complex_type_elements(inherited_types, wsdl_type):
## inherited type
if isinstance(wsdl_type.rawchildren[0].rawchildren[0], suds.xsd.sxbasic.Extension):
if isinstance(wsdl_type.rawchildren[0].rawchildren[0], suds.xsd.sxbasic.Extension): # pylint: disable=no-else-return
abstract_base = wsdl_type.rawchildren[0].rawchildren[0].ref[0]
if abstract_base not in inherited_types:
inherited_types[abstract_base] = set()
Expand Down Expand Up @@ -542,7 +541,7 @@ def sync_accounts_stream(account_ids, catalog_item):
singer.write_bookmark(STATE, 'accounts', 'last_record', max_accounts_last_modified)
singer.write_state(STATE)

def sync_campaigns(client, account_id, selected_streams):
def sync_campaigns(client, account_id, selected_streams): # pylint: disable=inconsistent-return-statements
# CampaignType defaults to 'Search', but there are other types of campaigns
response = client.GetCampaignsByAccountId(AccountId=account_id, CampaignType='Search Shopping DynamicSearchAds')
response_dict = sobject_to_dict(response)
Expand All @@ -569,8 +568,8 @@ def sync_ad_groups(client, account_id, campaign_ids, selected_streams):
ad_groups = sobject_to_dict(response)['AdGroup']

if 'ad_groups' in selected_streams:
LOGGER.info('Syncing AdGroups for Account: {}, Campaign: {}'.format(
account_id, campaign_id))
LOGGER.info('Syncing AdGroups for Account: %s, Campaign: %s',
account_id, campaign_id)
selected_fields = get_selected_fields(selected_streams['ad_groups'])
singer.write_schema('ad_groups', get_core_schema(client, 'AdGroup'), ['Id'])
with metrics.record_counter('ad_groups') as counter:
Expand Down Expand Up @@ -608,13 +607,13 @@ def sync_ads(client, selected_streams, ad_group_ids):
def sync_core_objects(account_id, selected_streams):
client = create_sdk_client('CampaignManagementService', account_id)

LOGGER.info('Syncing Campaigns for Account: {}'.format(account_id))
LOGGER.info('Syncing Campaigns for Account: %s', account_id)
campaign_ids = sync_campaigns(client, account_id, selected_streams)

if campaign_ids and ('ad_groups' in selected_streams or 'ads' in selected_streams):
ad_group_ids = sync_ad_groups(client, account_id, campaign_ids, selected_streams)
if 'ads' in selected_streams:
LOGGER.info('Syncing Ads for Account: {}'.format(account_id))
LOGGER.info('Syncing Ads for Account: %s', account_id)
sync_ads(client, selected_streams, ad_group_ids)

def type_report_row(row):
Expand Down Expand Up @@ -644,33 +643,32 @@ async def poll_report(client, account_id, report_name, start_date, end_date, req
download_url = None
with metrics.job_timer('generate_report'):
for i in range(1, MAX_NUM_REPORT_POLLS + 1):
LOGGER.info('Polling report job {}/{} - {} - from {} to {}'.format(
LOGGER.info('Polling report job %s/%s - %s - from %s to %s',
i,
MAX_NUM_REPORT_POLLS,
report_name,
start_date,
end_date))
end_date)
response = client.PollGenerateReport(request_id)
if response.Status == 'Error':
LOGGER.warn(
'Error polling {} for account {} with request id {}'
.format(report_name, account_id, request_id))
LOGGER.warn('Error polling %s for account %s with request id %s',
report_name, account_id, request_id)
return False, None
if response.Status == 'Success':
if response.ReportDownloadUrl:
download_url = response.ReportDownloadUrl
else:
LOGGER.info('No results for report: {} - from {} to {}'.format(
report_name,
start_date,
end_date))
LOGGER.info('No results for report: %s - from %s to %s',
report_name,
start_date,
end_date)
break

if i == MAX_NUM_REPORT_POLLS:
LOGGER.info('Generating report timed out: {} - from {} to {}'.format(
report_name,
start_date,
end_date))
LOGGER.info('Generating report timed out: %s - from %s to %s',
report_name,
start_date,
end_date)
else:
await asyncio.sleep(REPORT_POLL_SLEEP)

Expand All @@ -690,7 +688,6 @@ def stream_report(stream_name, report_name, url, report_time):
if response.status_code != 200:
raise Exception('Non-200 ({}) response downloading report: {}'.format(
response.status_code, report_name))

with ZipFile(io.BytesIO(response.content)) as zip_file:
with zip_file.open(zip_file.namelist()[0]) as binary_file:
with io.TextIOWrapper(binary_file, encoding='utf-8') as csv_file:
Expand All @@ -708,7 +705,7 @@ def stream_report(stream_name, report_name, url, report_time):
counter.increment()

def get_report_interval(state_key):
report_max_days = int(CONFIG.get('report_max_days', 30))
report_max_days = int(CONFIG.get('report_max_days', 30)) # pylint: disable=unused-variable
conversion_window = int(CONFIG.get('conversion_window', -30))

config_start_date = arrow.get(CONFIG.get('start_date'))
Expand Down Expand Up @@ -737,8 +734,8 @@ async def sync_report(client, account_id, report_stream):

start_date, end_date = get_report_interval(state_key)

LOGGER.info('Generating {} reports for account {} between {} - {}'.format(
report_stream.stream, account_id, start_date, end_date))
LOGGER.info('Generating %s reports for account %s between %s - %s',
report_stream.stream, account_id, start_date, end_date)

current_start_date = start_date
while current_start_date <= end_date:
Expand All @@ -752,7 +749,7 @@ async def sync_report(client, account_id, report_stream):
report_stream,
current_start_date,
current_end_date)
except InvalidDateRangeEnd as ex:
except InvalidDateRangeEnd as ex: # pylint: disable=unused-variable
LOGGER.warn("Bing reported that the requested report date range ended outside of "
"their data retention period. Skipping to next range...")
success = True
Expand Down Expand Up @@ -781,7 +778,7 @@ async def sync_report_interval(client, account_id, report_stream,
success, download_url = await poll_report(client, account_id, report_name,
start_date, end_date, request_id)

except Exception as some_error:
except Exception as some_error: # pylint: disable=broad-except,unused-variable
LOGGER.info('The request_id %s for %s is invalid, generating a new one',
request_id,
state_key)
Expand All @@ -795,9 +792,9 @@ async def sync_report_interval(client, account_id, report_stream,
success, download_url = await poll_report(client, account_id, report_name,
start_date, end_date, request_id)

if success and download_url:
LOGGER.info('Streaming report: {} for account {} - from {} to {}'
.format(report_name, account_id, start_date, end_date))
if success and download_url: # pylint: disable=no-else-return
LOGGER.info('Streaming report: %s for account %s - from %s to %s',
report_name, account_id, start_date, end_date)

stream_report(report_stream.stream,
report_name,
Expand All @@ -808,29 +805,26 @@ async def sync_report_interval(client, account_id, report_stream,
singer.write_state(STATE)
return True
elif success and not download_url:
LOGGER.info('No data for report: {} for account {} - from {} to {}'
.format(report_name, account_id, start_date, end_date))
LOGGER.info('No data for report: %s for account %s - from %s to %s',
report_name, account_id, start_date, end_date)
singer.write_bookmark(STATE, state_key, 'request_id', None)
singer.write_bookmark(STATE, state_key, 'date', end_date.isoformat())
singer.write_state(STATE)
return True
else:
LOGGER.info('Unsuccessful request for report: {} for account {} - from {} to {}'
.format(report_name, account_id, start_date, end_date))
LOGGER.info('Unsuccessful request for report: %s for account %s - from %s to %s',
report_name, account_id, start_date, end_date)
singer.write_bookmark(STATE, state_key, 'request_id', None)
singer.write_state(STATE)
return False


def get_report_request_id(client, account_id, report_stream, report_name,
start_date, end_date, state_key, force_refresh=False):

saved_request_id = singer.get_bookmark(STATE, state_key, 'request_id')
if not force_refresh and saved_request_id is not None:
LOGGER.info(
'Resuming polling for account {}: {}'
.format(account_id, report_name)
)
LOGGER.info('Resuming polling for account %s: %s',
account_id, report_name)
return saved_request_id

report_request = build_report_request(client, account_id, report_stream,
Expand All @@ -841,10 +835,8 @@ def get_report_request_id(client, account_id, report_stream, report_name,

def build_report_request(client, account_id, report_stream, report_name,
start_date, end_date):
LOGGER.info(
'Syncing report for account {}: {} - from {} to {}'
.format(account_id, report_name, start_date, end_date)
)
LOGGER.info('Syncing report for account %s: %s - from %s to %s',
account_id, report_name, start_date, end_date)

report_request = client.factory.create('{}Request'.format(report_name))
report_request.Format = 'Csv'
Expand Down