Skip to content

Commit 9ad2f6f

Browse files
committed
changes to CLI architecture
1 parent 8beb10d commit 9ad2f6f

File tree

2 files changed

+28
-39
lines changed

2 files changed

+28
-39
lines changed

cli.py

Lines changed: 11 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ def cli_set_arguments():
1010
parser = argparse.ArgumentParser(description="Scrape data from thingiverse")
1111
parser.add_argument('type', type=str, metavar='{Thing, Make, User, Remix, APIs, All}',
1212
help='Type of data to scrape', default='Thing')
13-
parser.add_argument('-I', '--Interactive', help='Opens program in interactive mode', action='store_true')
13+
# parser.add_argument('-I', '--Interactive', help='Opens program in interactive mode', action='store_true')
1414
parser.add_argument('-n', '--num_items', help='How many items to scrape', type=int, default=500)
1515
parser.add_argument('-N', '--Name', help='change name of save file (for local save)',
1616
type=str, default=pconf.def_save_name)
@@ -19,13 +19,10 @@ def cli_set_arguments():
1919
parser.add_argument('-D', '--Driver', help='Driver path (for selenium)',
2020
type=str, default=pconf.driver_path)
2121
parser.add_argument('-J', '--save-json', help='save a copy of results as a json file', action='store_true')
22-
# TODO: implement different search parameters
23-
parser.add_argument('-O', '--Order-parameter', metavar='{Test1, Test2}',
24-
help='The order of things on the search page', default='Test1')
2522
parser.add_argument('-S', '--pre-search', type=int, default=0,
2623
help='When scraping for a non-thing type object can first scrape for things, and then scrape '
2724
'for data based on result. Please provide number of pages to scrape')
28-
parser.add_argument('-V', '--volume', type=int, default=0,
25+
parser.add_argument('-v', '--volume', type=int, default=0,
2926
help='Set how much info is printed out: '
3027
'10 = quiet, '
3128
'20 = normal, '
@@ -34,23 +31,18 @@ def cli_set_arguments():
3431
parser.add_argument('--google-app-name', help='google developer code used to access google APIs',
3532
type=str, default=pconf.google_ktree_API_key)
3633

37-
gr_volume = parser.add_mutually_exclusive_group()
38-
# volume of CLI output
39-
gr_volume.add_argument('-q', '--quiet', help='Print quiet', action='store_true')
40-
gr_volume.add_argument('-v', '--verbose', help='Print verbose', action='store_true')
41-
4234
gr_data = parser.add_mutually_exclusive_group()
4335
# where to load data from at the start of the run
4436
gr_data.add_argument('-j', '--load-json', help='loads a json save file', action='store_true')
45-
gr_data.add_argument('-d', '--load-db', help='(el) Loads json save', action='store_true')
37+
# gr_data.add_argument('-d', '--load-db', help='(el) Loads json save', action='store_true')
4638

47-
gr_db = parser.add_mutually_exclusive_group()
48-
# how to save results to db
49-
gr_db.add_argument('-u', '--update', help='replace duplicates', action='store_true')
50-
gr_db.add_argument('-a', '--append', help='ignore duplicates', action='store_true')
51-
gr_db.add_argument('-p', '--print', help="don't save results to database, just print to screen",
52-
action='store_true')
53-
gr_db.add_argument('--replace', help='delete all data once done scraping, and start anew', action='store_true')
39+
# gr_db = parser.add_mutually_exclusive_group()
40+
# # how to save results to db
41+
# gr_db.add_argument('-u', '--update', help='replace duplicates', action='store_true')
42+
# gr_db.add_argument('-a', '--append', help='ignore duplicates', action='store_true')
43+
# gr_db.add_argument('-p', '--print', help="don't save results to database, just print to screen",
44+
# action='store_true')
45+
# gr_db.add_argument('--replace', help='delete all data once done scraping, and start anew', action='store_true')
5446
return parser
5547

5648

@@ -74,9 +66,7 @@ def inter_parser(args=None, parser=None):
7466
inp['driver_path'] = vars(args).get("Driver", pconf.driver_path)
7567

7668
inp['search_type'] = vars(args).get("type", 'thing').lower()
77-
inp['volume'] = 'q' if vars(args).get("quiet", False) else \
78-
'v' if vars(args).get("verbose", False) else \
79-
'n'
69+
inp['volume'] = vars(args)['volume']
8070
inp['save_to_db_mode'] = 'u' if vars(args).get("update", False) else \
8171
'a' if vars(args).get("append", False) else \
8272
'p' if vars(args).get("print", False) else \
@@ -85,7 +75,6 @@ def inter_parser(args=None, parser=None):
8575
'j' if vars(args).get("load_json", False) else \
8676
'n'
8777
inp['do_save_json'] = vars(args).get("save_json", False)
88-
inp['Interactive'] = vars(args).get("Interactive", False)
8978
inp['preliminary_count'] = vars(args).get("pre_search", 0) if inp['search_type'] != 'thing' else 0
9079
inp['google_app_id'] = vars(args)['google_app_name']
9180
return inp

main.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ def scrape_main_page(settings, data=None):
138138
logger.debug(f"{i} - (Thing) Failed to retrieve for item id = {key}\n")
139139
else:
140140
logger.debug(f"{i} - (Thing) Success: {key}")
141-
if settings['volume'] == 'v':
141+
if settings['volume'] >= 40:
142142
data_to_scrape[key].print_info()
143143
return data, failed
144144

@@ -190,7 +190,7 @@ def scrape_users_in_db(settings, db):
190190
logger.debug(f"{i} - (User) Failed to retrieve for item id = {k}\n")
191191
else:
192192
logger.debug(f"{i} - (User) Success: {k}")
193-
if settings['volume'] == 'v':
193+
if settings['volume'] >= 40:
194194
user.print_info()
195195
return db, failed
196196

@@ -252,7 +252,7 @@ def scrape_make_in_db(settings, db):
252252
logger.debug(f"{i} - (Make) Failed to retrieve for item id = {k}\n")
253253
else:
254254
logger.debug(f"{i} - (Make) Success: {k}")
255-
if settings['volume'] == 'v':
255+
if settings['volume'] >= 40:
256256
make.print_info()
257257
return db, failed
258258

@@ -311,7 +311,7 @@ def scrape_remixes_in_db(settings, db):
311311
logger.debug(f"{i} - (Remix) Failed to retrieve for item id = {k}\n")
312312
else:
313313
logger.debug(f"{i} - (Remix) Success: {k}")
314-
if settings['volume'] == 'v':
314+
if settings['volume'] >= 40:
315315
remix.print_info()
316316
return db, failed
317317

@@ -396,15 +396,22 @@ def setup_log(log, inp):
396396
file_handler.setLevel(eval(f"logging.{gconf.Logs.LEVEL_LOG}"))
397397

398398
stream_handler = logging.StreamHandler()
399-
if inp['volume'] == 'v':
400-
stream_handler.setLevel(logging.DEBUG)
401-
formatter_stream = logging.Formatter(gconf.Logs.FORMAT_STREAM_V)
402-
elif inp['volume'] == 'q':
399+
if inp['volume'] < 20:
400+
# quiet mode
403401
stream_handler.setLevel(logging.INFO)
404402
formatter_stream = logging.Formatter(gconf.Logs.FORMAT_STREAM_Q)
405-
else:
403+
elif inp['volume'] < 30:
404+
# normal mode
406405
stream_handler.setLevel(logging.INFO)
407406
formatter_stream = logging.Formatter(gconf.Logs.FORMAT_STREAM)
407+
elif inp['volume'] < 40:
408+
# debug mode
409+
stream_handler.setLevel(logging.DEBUG)
410+
formatter_stream = logging.Formatter(gconf.Logs.FORMAT_STREAM_V)
411+
else:
412+
# verbose mode
413+
stream_handler.setLevel(logging.DEBUG)
414+
formatter_stream = logging.Formatter(gconf.Logs.FORMAT_STREAM_V)
408415
stream_handler.setFormatter(formatter_stream)
409416
log.addHandler(file_handler)
410417
log.addHandler(stream_handler)
@@ -420,18 +427,11 @@ def main():
420427
logger.info('Opened browser obj')
421428
args['browser_obj'] = browser
422429
data = follow_cli(args, data)
423-
while args['Interactive']:
424-
input('*'*25)
425-
args = cli.inter_parser()
426-
logger.debug(f"args = {args}")
427-
logger.warning("Interactive mode not implemented yet, quiting")
428-
# TODO: implement support for interactive mode
429-
break
430-
data = follow_cli(args)
431430
for k in data:
432431
logger.debug(f"{k}:\n{data[k]}")
433432
input('Done, press any key to quit')
434433
logger.info('Done with browser obj')
434+
logger.info('quitting data miner')
435435

436436

437437
if __name__ == '__main__':

0 commit comments

Comments
 (0)