 from copy import deepcopy

 from cumulus_lambda_functions.lib.json_validator import JsonValidator
-
+from cumulus_lambda_functions.lib.lambda_logger_generator import LambdaLoggerGenerator

 API_GATEWAY_EVENT_SCHEMA = {
     'type': 'object',
@@ unchanged schema body omitted @@
         }
     }
 }
+LOGGER = LambdaLoggerGenerator.get_logger(__name__, LambdaLoggerGenerator.get_level_from_env())


 class LambdaApiGatewayUtils:
-    @staticmethod
-    def generate_requesting_url(event: dict):
+    def __init__(self, event: dict, default_limit: int = 10):
+        self.__event = event
+        self.__default_limit = default_limit
         api_gateway_event_validator_result = JsonValidator(API_GATEWAY_EVENT_SCHEMA).validate(event)
         if api_gateway_event_validator_result is not None:
             raise ValueError(f'invalid event: {api_gateway_event_validator_result}. event: {event}')
-        requesting_url = f"https://{event['headers']['Host']}{event['requestContext']['path']}"
+
+    def __get_current_page(self):
+        try:
+            requesting_base_url = f"https://{self.__event['headers']['Host']}{self.__event['requestContext']['path']}"
+            new_queries = deepcopy(self.__event['queryStringParameters']) if 'queryStringParameters' in self.__event and self.__event['queryStringParameters'] is not None else {}
+            limit = int(new_queries['limit'] if 'limit' in new_queries else self.__default_limit)
+            offset = int(new_queries['offset'] if 'offset' in new_queries else 0)
+            new_queries['limit'] = limit
+            new_queries['offset'] = offset
+            requesting_url = f"{requesting_base_url}?{'&'.join([f'{k}={v}' for k, v in new_queries.items()])}"
+        except Exception as e:
+            LOGGER.exception(f'error while getting current page URL')
+            return f'unable to get current page URL, {str(e)}'
         return requesting_url

-    @staticmethod
-    def generate_next_url(event: dict, default_limit: int = 10):
-        requesting_base_url = LambdaApiGatewayUtils.generate_requesting_url(event)
-        new_queries = deepcopy(event['queryStringParameters']) if 'queryStringParameters' in event and event['queryStringParameters'] is not None else {}
-        limit = int(new_queries['limit'] if 'limit' in new_queries else default_limit)
-        if limit == 0:
-            return ''
-        offset = int(new_queries['offset'] if 'offset' in new_queries else 0)
-        offset += limit
-        new_queries['limit'] = limit
-        new_queries['offset'] = offset
-        requesting_url = f"{requesting_base_url}?{'&'.join([f'{k}={v}' for k, v in new_queries.items()])}"
+    def __get_next_page(self):
+        try:
+            requesting_base_url = f"https://{self.__event['headers']['Host']}{self.__event['requestContext']['path']}"
+            new_queries = deepcopy(self.__event['queryStringParameters']) if 'queryStringParameters' in self.__event and self.__event['queryStringParameters'] is not None else {}
+            limit = int(new_queries['limit'] if 'limit' in new_queries else self.__default_limit)
+            if limit == 0:
+                return ''
+            offset = int(new_queries['offset'] if 'offset' in new_queries else 0)
+            offset += limit
+            new_queries['limit'] = limit
+            new_queries['offset'] = offset
+            requesting_url = f"{requesting_base_url}?{'&'.join([f'{k}={v}' for k, v in new_queries.items()])}"
+        except Exception as e:
+            LOGGER.exception(f'error while getting next page URL')
+            return f'unable to get next page URL, {str(e)}'
         return requesting_url

-    @staticmethod
-    def generate_prev_url(event: dict, default_limit: int = 10):
-        requesting_base_url = LambdaApiGatewayUtils.generate_requesting_url(event)
-        new_queries = deepcopy(event['queryStringParameters']) if 'queryStringParameters' in event and event['queryStringParameters'] is not None else {}
-        limit = int(new_queries['limit'] if 'limit' in new_queries else default_limit)
-        if limit == 0:
-            return ''
-        offset = int(new_queries['offset'] if 'offset' in new_queries else 0)
-        offset -= limit
-        if offset < 0:
-            offset = 0
-        new_queries['limit'] = limit
-        new_queries['offset'] = offset
-        requesting_url = f"{requesting_base_url}?{'&'.join([f'{k}={v}' for k, v in new_queries.items()])}"
+    def __get_prev_page(self):
+        try:
+            requesting_base_url = f"https://{self.__event['headers']['Host']}{self.__event['requestContext']['path']}"
+            new_queries = deepcopy(self.__event['queryStringParameters']) if 'queryStringParameters' in self.__event and self.__event['queryStringParameters'] is not None else {}
+            limit = int(new_queries['limit'] if 'limit' in new_queries else self.__default_limit)
+            if limit == 0:
+                return ''
+            offset = int(new_queries['offset'] if 'offset' in new_queries else 0)
+            offset -= limit
+            if offset < 0:
+                offset = 0
+            new_queries['limit'] = limit
+            new_queries['offset'] = offset
+            requesting_url = f"{requesting_base_url}?{'&'.join([f'{k}={v}' for k, v in new_queries.items()])}"
+        except Exception as e:
+            LOGGER.exception(f'error while getting previous page URL')
+            return f'unable to get previous page URL, {str(e)}'
         return requesting_url
+
+    def generate_pagination_links(self):
+        return [
+            {'rel': 'self', 'href': self.__get_current_page()},
+            {'rel': 'root', 'href': f"https://{self.__event['headers']['Host']}"},
+            {'rel': 'next', 'href': self.__get_next_page()},
+            {'rel': 'prev', 'href': self.__get_prev_page()},
+        ]
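For reference, a minimal usage sketch of the refactored helper from a Lambda handler. The handler function, the import path for LambdaApiGatewayUtils, and the response body shape are assumptions for illustration only; they are not part of this change.

import json

# NOTE: the module path below is an assumption inferred from the other
# cumulus_lambda_functions.lib imports; adjust it to wherever this class lives.
from cumulus_lambda_functions.lib.lambda_api_gateway_utils import LambdaApiGatewayUtils


def lambda_handler(event, context):
    # Constructing the helper validates the API Gateway proxy event against
    # API_GATEWAY_EVENT_SCHEMA; a malformed event raises ValueError.
    pagination = LambdaApiGatewayUtils(event, default_limit=10)
    body = {
        'results': [],  # hypothetical query results would go here
        'links': pagination.generate_pagination_links(),
    }
    return {'statusCode': 200, 'body': json.dumps(body)}

Each link is a {'rel': ..., 'href': ...} dict: 'self' echoes the current limit/offset, 'next' advances offset by limit, 'prev' steps it back and clamps at 0, and 'root' points at the API host. When a private helper cannot build a URL it logs the exception and returns an error message string instead of raising, so callers that need strict behavior should check the 'href' values before returning them.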