From dec6ac49f6e60f3cb3bcda25694b3b9dcf2f41cc Mon Sep 17 00:00:00 2001 From: Josh Soref <2119212+jsoref@users.noreply.github.com> Date: Fri, 17 Feb 2023 12:48:24 -0500 Subject: [PATCH] chore(backend): Spelling (#19677) Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> Signed-off-by: Josh Soref Co-authored-by: Josh Soref Co-authored-by: Evan Rusackas --- docs/docs/miscellaneous/chart-params.mdx | 2 +- docs/static/resources/openapi.json | 2 +- helm/superset/values.yaml | 4 ++-- superset/cli/importexport.py | 2 +- superset/cli/update.py | 2 +- ...zed_object.py => not_authorized_object.py} | 0 superset/common/query_object.py | 2 +- superset/config.py | 4 ++-- superset/connectors/sqla/models.py | 6 ++--- superset/dao/base.py | 2 +- .../dashboards/commands/importers/v1/utils.py | 2 +- .../dashboards/filter_sets/commands/base.py | 2 +- superset/db_engine_specs/base.py | 6 ++--- superset/db_engine_specs/hive.py | 4 ++-- superset/db_engine_specs/impala.py | 2 +- superset/db_engine_specs/kylin.py | 4 ++-- .../dashboards/FCC_New_Coder_Survey_2018.yaml | 4 ++-- .../datasets/examples/FCC_2018_Survey.yaml | 6 ++--- superset/examples/multiformat_time_series.py | 2 +- superset/explore/permalink/api.py | 2 +- .../2017-10-03_14-37_4736ec66ce19_.py | 4 ++-- superset/models/helpers.py | 2 +- superset/reports/commands/exceptions.py | 2 +- superset/security/manager.py | 2 +- superset/sql_parse.py | 2 +- .../translations/en/LC_MESSAGES/messages.json | 4 ++-- .../translations/en/LC_MESSAGES/messages.po | 6 ++--- superset/translations/messages.pot | 4 ++-- superset/translations/utils.py | 2 +- superset/utils/core.py | 2 +- superset/utils/date_parser.py | 2 +- .../pandas_postprocessing/contribution.py | 2 +- .../utils/pandas_postprocessing/prophet.py | 2 +- superset/views/base_schemas.py | 2 +- superset/views/core.py | 2 +- superset/views/database/views.py | 24 +++++++++---------- superset/viz.py | 10 ++++---- tests/integration_tests/conftest.py | 2 +- .../integration_tests/databases/api_tests.py | 6 ++--- tests/integration_tests/event_logger_tests.py | 6 ++--- 40 files changed, 74 insertions(+), 74 deletions(-) rename superset/common/{not_authrized_object.py => not_authorized_object.py} (100%) diff --git a/docs/docs/miscellaneous/chart-params.mdx b/docs/docs/miscellaneous/chart-params.mdx index e157bf8b20bed..e7b8e26dc1235 100644 --- a/docs/docs/miscellaneous/chart-params.mdx +++ b/docs/docs/miscellaneous/chart-params.mdx @@ -72,7 +72,7 @@ Note not all fields are correctly categorized. The fields vary based on visualiz | `columns` | _array(string)_ | The **Breakdowns** widget | | `groupby` | _array(string)_ | The **Group by** or **Series** widget | | `limit` | _number_ | The **Series Limit** widget | -| `metric`
`metric_2`
`metrics`
`percent_mertics`
`secondary_metric`
`size`
`x`
`y` | _string_,_object_,_array(string)_,_array(object)_ | The metric(s) depending on the visualization type | +| `metric`
`metric_2`
`metrics`
`percent_metrics`
`secondary_metric`
`size`
`x`
`y` | _string_,_object_,_array(string)_,_array(object)_ | The metric(s) depending on the visualization type | | `order_asc` | _boolean_ | The **Sort Descending** widget | | `row_limit` | _number_ | The **Row limit** widget | | `timeseries_limit_metric` | _object_ | The **Sort By** widget | diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json index 18ea7a47f8f19..f303b83784026 100644 --- a/docs/static/resources/openapi.json +++ b/docs/static/resources/openapi.json @@ -17285,7 +17285,7 @@ "type": "string" }, "url": { - "description": "pemanent link.", + "description": "permanent link.", "type": "string" } }, diff --git a/helm/superset/values.yaml b/helm/superset/values.yaml index a212754e39162..03238faf01152 100644 --- a/helm/superset/values.yaml +++ b/helm/superset/values.yaml @@ -723,7 +723,7 @@ postgresql: ## Enable PostgreSQL persistence using Persistent Volume Claims. enabled: true ## - ## Persistant class + ## Persistent class # storageClass: classname ## ## Access modes: @@ -778,7 +778,7 @@ redis: ## Use a PVC to persist data. enabled: false ## - ## Persistant class + ## Persistent class # storageClass: classname ## ## Access mode: diff --git a/superset/cli/importexport.py b/superset/cli/importexport.py index 6ca58e9952dfd..c7689569c2436 100755 --- a/superset/cli/importexport.py +++ b/superset/cli/importexport.py @@ -342,7 +342,7 @@ def import_dashboards(path: str, recursive: bool, username: str) -> None: "-s", "sync", default="", - help="comma seperated list of element types to synchronize " + help="comma separated list of element types to synchronize " 'e.g. "metrics,columns" deletes metrics and columns in the DB ' "that are not specified in the YAML file", ) diff --git a/superset/cli/update.py b/superset/cli/update.py index bdc54db3a9c99..d2d416e1d9eec 100755 --- a/superset/cli/update.py +++ b/superset/cli/update.py @@ -120,7 +120,7 @@ def re_encrypt_secrets(previous_secret_key: Optional[str] = None) -> None: except ValueError as exc: click.secho( f"An error occurred, " - f"probably an invalid previoud secret key was provided. Error:[{exc}]", + f"probably an invalid previous secret key was provided. Error:[{exc}]", err=True, ) sys.exit(1) diff --git a/superset/common/not_authrized_object.py b/superset/common/not_authorized_object.py similarity index 100% rename from superset/common/not_authrized_object.py rename to superset/common/not_authorized_object.py diff --git a/superset/common/query_object.py b/superset/common/query_object.py index 94cf2a74ccaa9..70f50c3e77603 100644 --- a/superset/common/query_object.py +++ b/superset/common/query_object.py @@ -360,7 +360,7 @@ def cache_key(self, **extra: Any) -> str: # TODO: the below KVs can all be cleaned up and moved to `to_dict()` at some # predetermined point in time when orgs are aware that the previously - # chached results will be invalidated. + # cached results will be invalidated. 
if not self.apply_fetch_values_predicate: del cache_dict["apply_fetch_values_predicate"] if self.datasource: diff --git a/superset/config.py b/superset/config.py index ab23da0c29f95..5a0aea77fbee5 100644 --- a/superset/config.py +++ b/superset/config.py @@ -198,7 +198,7 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]: # SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp' # SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp' -# In order to hook up a custom password store for all SQLACHEMY connections +# In order to hook up a custom password store for all SQLALCHEMY connections # implement a function that takes a single argument of type 'sqla.engine.url', # returns a password and set SQLALCHEMY_CUSTOM_PASSWORD_STORE. # @@ -411,7 +411,7 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]: "ENABLE_TEMPLATE_REMOVE_FILTERS": False, # Allow for javascript controls components # this enables programmers to customize certain charts (like the - # geospatial ones) by inputing javascript in controls. This exposes + # geospatial ones) by inputting javascript in controls. This exposes # an XSS security vulnerability "ENABLE_JAVASCRIPT_CONTROLS": False, "KV_STORE": False, diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 8be079bde21c5..7ad5ad0312053 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -755,7 +755,7 @@ def sql_url(self) -> str: return self.database.sql_url + "?table_name=" + str(self.table_name) def external_metadata(self) -> List[Dict[str, str]]: - # todo(yongjie): create a pysical table column type in seprated PR + # todo(yongjie): create a physical table column type in a separate PR if self.sql: return get_virtual_table_metadata(dataset=self) # type: ignore return get_physical_table_metadata( @@ -1249,7 +1249,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma if metrics_exprs: main_metric_expr = metrics_exprs[0] else: - main_metric_expr, label = literal_column("COUNT(*)"), "ccount" + main_metric_expr, label = literal_column("COUNT(*)"), "count" main_metric_expr = self.make_sqla_column_compatible(main_metric_expr, label) # To ensure correct handling of the ORDER BY labeling we need to reference the @@ -1419,7 +1419,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma # Order by columns are "hidden" columns, some databases require them # always be present in SELECT if an aggregation function is used - if not db_engine_spec.allows_hidden_ordeby_agg: + if not db_engine_spec.allows_hidden_orderby_agg: select_exprs = remove_duplicates(select_exprs + orderby_exprs) qry = sa.select(select_exprs) diff --git a/superset/dao/base.py b/superset/dao/base.py index 126238f661323..28cfdf2cc625e 100644 --- a/superset/dao/base.py +++ b/superset/dao/base.py @@ -44,7 +44,7 @@ class BaseDAO: """ base_filter: Optional[BaseFilter] = None """ - Child classes can register base filtering to be aplied to all filter methods + Child classes can register base filtering to be applied to all filter methods """ id_column_name = "id" diff --git a/superset/dashboards/commands/importers/v1/utils.py b/superset/dashboards/commands/importers/v1/utils.py index 513d1efcdb212..cd54c3cefeb48 100644 --- a/superset/dashboards/commands/importers/v1/utils.py +++ b/superset/dashboards/commands/importers/v1/utils.py @@ -79,7 +79,7 @@ def update_id_refs( # pylint: disable=too-many-locals ] if "filter_scopes" in metadata: - # in 
filter_scopes the key is the chart ID as a string; we need to udpate + # in filter_scopes the key is the chart ID as a string; we need to update # them to be the new ID as a string: metadata["filter_scopes"] = { str(id_map[int(old_id)]): columns diff --git a/superset/dashboards/filter_sets/commands/base.py b/superset/dashboards/filter_sets/commands/base.py index e6a4b03e3faa0..a7897eca8e7c7 100644 --- a/superset/dashboards/filter_sets/commands/base.py +++ b/superset/dashboards/filter_sets/commands/base.py @@ -20,7 +20,7 @@ from flask_appbuilder.models.sqla import Model from superset import security_manager -from superset.common.not_authrized_object import NotAuthorizedException +from superset.common.not_authorized_object import NotAuthorizedException from superset.dashboards.commands.exceptions import DashboardNotFoundError from superset.dashboards.dao import DashboardDAO from superset.dashboards.filter_sets.commands.exceptions import ( diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index 49a01c0597d57..128ce511be684 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -121,7 +121,7 @@ class TimestampExpression( ): # pylint: disable=abstract-method, too-many-ancestors def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None: """Sqlalchemy class that can be can be used to render native column elements - respeting engine-specific quoting rules as part of a string-based expression. + respecting engine-specific quoting rules as part of a string-based expression. :param expr: Sql expression with '{col}' denoting the locations where the col object will be rendered. @@ -331,9 +331,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods # Whether ORDER BY clause must appear in SELECT # if TRUE, then it doesn't have to. - allows_hidden_ordeby_agg = True + allows_hidden_orderby_agg = True - # Whether ORDER BY clause can use sql caculated expression + # Whether ORDER BY clause can use sql calculated expression # if True, use alias of select column for `order by` # the True is safely for most database # But for backward compatibility, False by default diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py index c049ee652eee4..63c531e843edf 100644 --- a/superset/db_engine_specs/hive.py +++ b/superset/db_engine_specs/hive.py @@ -96,7 +96,7 @@ class HiveEngineSpec(PrestoEngineSpec): engine_name = "Apache Hive" max_column_name_length = 767 allows_alias_to_source_column = True - allows_hidden_ordeby_agg = False + allows_hidden_orderby_agg = False # When running `SHOW FUNCTIONS`, what is the name of the column with the # function names? @@ -491,7 +491,7 @@ def get_url_for_impersonation( :param username: Effective username """ # Do nothing in the URL object since instead this should modify - # the configuraiton dictionary. See get_configuration_for_impersonation + # the configuration dictionary. See get_configuration_for_impersonation return url @classmethod diff --git a/superset/db_engine_specs/impala.py b/superset/db_engine_specs/impala.py index 5de1e690c6c9f..e59c2b74fbee0 100644 --- a/superset/db_engine_specs/impala.py +++ b/superset/db_engine_specs/impala.py @@ -80,7 +80,7 @@ def get_schema_names(cls, inspector: Inspector) -> List[str]: def has_implicit_cancel(cls) -> bool: """ Return True if the live cursor handles the implicit cancelation of the query, - False otherise. + False otherwise. 
:return: Whether the live cursor implicitly cancels the query :see: handle_cursor diff --git a/superset/db_engine_specs/kylin.py b/superset/db_engine_specs/kylin.py index d76811e86c36c..e340daea51f95 100644 --- a/superset/db_engine_specs/kylin.py +++ b/superset/db_engine_specs/kylin.py @@ -49,6 +49,6 @@ def convert_dttm( if isinstance(sqla_type, types.Date): return f"CAST('{dttm.date().isoformat()}' AS DATE)" if isinstance(sqla_type, types.TIMESTAMP): - datetime_fomatted = dttm.isoformat(sep=" ", timespec="seconds") - return f"""CAST('{datetime_fomatted}' AS TIMESTAMP)""" + datetime_formatted = dttm.isoformat(sep=" ", timespec="seconds") + return f"""CAST('{datetime_formatted}' AS TIMESTAMP)""" return None diff --git a/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml b/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml index a007c5ffc8369..f7cfedd84d2d4 100644 --- a/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml +++ b/superset/examples/configs/dashboards/FCC_New_Coder_Survey_2018.yaml @@ -504,7 +504,7 @@ position: code: '## FreeCodeCamp New Coder Survey 2018 - Every year, FCC surveys its user base (mostly budding softwware developers) + Every year, FCC surveys its user base (mostly budding software developers) to learn more about their interests, backgrounds, goals, job status, and socioeconomic features. This dashboard visualizes survey data from the 2018 survey. @@ -529,7 +529,7 @@ position: meta: code: "# Demographics\n\nFreeCodeCamp is a completely-online community of people\ \ learning to code and consists of aspiring & current developers from all\ - \ over the world. That doesn't necessarily mean that acccess to these types\ + \ over the world. That doesn't necessarily mean that access to these types\ \ of opportunities are evenly distributed. 
\n\nThe following charts can begin\ \ to help us understand:\n\n- the original citizenship of the survey respondents\n\ - minority representation among both aspiring and current developers\n- their\ diff --git a/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml b/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml index 26890b5ebbbce..5bbbe2f74b934 100644 --- a/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml +++ b/superset/examples/configs/datasets/examples/FCC_2018_Survey.yaml @@ -930,7 +930,7 @@ columns: expression: null description: null python_date_format: null -- column_name: rsrc_pluralsght +- column_name: rsrc_pluralsight verbose_name: null is_dttm: false is_active: null @@ -1320,7 +1320,7 @@ columns: expression: null description: null python_date_format: null -- column_name: curr_emplymnt_other +- column_name: curr_employment_other verbose_name: null is_dttm: false is_active: null @@ -1450,7 +1450,7 @@ columns: expression: null description: null python_date_format: null -- column_name: curr_emplymnt +- column_name: curr_employment verbose_name: null is_dttm: false is_active: null diff --git a/superset/examples/multiformat_time_series.py b/superset/examples/multiformat_time_series.py index b030bcdb0f23c..de9630ef58503 100644 --- a/superset/examples/multiformat_time_series.py +++ b/superset/examples/multiformat_time_series.py @@ -98,7 +98,7 @@ def load_multiformat_time_series( # pylint: disable=too-many-locals for col in obj.columns: dttm_and_expr = dttm_and_expr_dict[col.column_name] col.python_date_format = dttm_and_expr[0] - col.dbatabase_expr = dttm_and_expr[1] + col.database_expression = dttm_and_expr[1] col.is_dttm = True db.session.merge(obj) db.session.commit() diff --git a/superset/explore/permalink/api.py b/superset/explore/permalink/api.py index 88e819aa2b0c2..0d12f59e2ccac 100644 --- a/superset/explore/permalink/api.py +++ b/superset/explore/permalink/api.py @@ -83,7 +83,7 @@ def post(self) -> Response: description: The key to retrieve the permanent link data. url: type: string - description: pemanent link. + description: permanent link. 400: $ref: '#/components/responses/400' 401: diff --git a/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py b/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py index 99b3e0b5e3500..7fe94f3c8a9c4 100644 --- a/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py +++ b/superset/migrations/versions/2017-10-03_14-37_4736ec66ce19_.py @@ -97,7 +97,7 @@ def upgrade(): # Drop the datasource_name column and associated constraints. Note # due to prior revisions (1226819ee0e3, 3b626e2a6783) there may - # incorectly be multiple duplicate constraints. + # incorrectly be multiple duplicate constraints. names = generic_find_fk_constraint_names( foreign, {"datasource_name"}, "datasources", insp ) @@ -124,7 +124,7 @@ def upgrade(): logging.warning( "Constraint drop failed, you may want to do this " "manually on your database. For context, this is a known " - "issue around undeterministic contraint names on Postgres " + "issue around nondeterministic constraint names on Postgres " "and perhaps more databases through SQLAlchemy." 
) logging.exception(ex) diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 15b7a420a079e..5cc80576d0bcd 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -1620,7 +1620,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma # Order by columns are "hidden" columns, some databases require them # always be present in SELECT if an aggregation function is used - if not db_engine_spec.allows_hidden_ordeby_agg: + if not db_engine_spec.allows_hidden_orderby_agg: select_exprs = utils.remove_duplicates(select_exprs + orderby_exprs) qry = sa.select(select_exprs) diff --git a/superset/reports/commands/exceptions.py b/superset/reports/commands/exceptions.py index b908042f19a11..22aff0727da46 100644 --- a/superset/reports/commands/exceptions.py +++ b/superset/reports/commands/exceptions.py @@ -77,7 +77,7 @@ def __init__(self) -> None: class ReportScheduleOnlyChartOrDashboardError(ValidationError): """ - Marshmallow validation error for report schedule accept exlusive chart or dashboard + Marshmallow validation error for report schedule accept exclusive chart or dashboard """ def __init__(self) -> None: diff --git a/superset/security/manager.py b/superset/security/manager.py index 4e174c420dddb..b3fa1a6c53501 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -767,7 +767,7 @@ def sync_role_definitions(self) -> None: def _get_pvms_from_builtin_role(self, role_name: str) -> List[PermissionView]: """ - Gets a list of model PermissionView permissions infered from a builtin role + Gets a list of model PermissionView permissions inferred from a builtin role definition """ role_from_permissions_names = self.builtin_roles.get(role_name, []) diff --git a/superset/sql_parse.py b/superset/sql_parse.py index ab2f04417249c..ef2c38ccfa182 100644 --- a/superset/sql_parse.py +++ b/superset/sql_parse.py @@ -217,7 +217,7 @@ def limit(self) -> Optional[int]: return self._limit def is_select(self) -> bool: - # make sure we strip comments; prevents a bug with coments in the CTE + # make sure we strip comments; prevents a bug with comments in the CTE parsed = sqlparse.parse(self.strip_comments()) if parsed[0].get_type() == "SELECT": return True diff --git a/superset/translations/en/LC_MESSAGES/messages.json b/superset/translations/en/LC_MESSAGES/messages.json index f344b2c2ec4e4..50d30c5e24987 100644 --- a/superset/translations/en/LC_MESSAGES/messages.json +++ b/superset/translations/en/LC_MESSAGES/messages.json @@ -481,7 +481,7 @@ "Alert fired during grace period.": [""], "Alert ended grace period.": [""], "Alert on grace period": [""], - "Report Schedule sellenium user not found": [""], + "Report Schedule selenium user not found": [""], "Report Schedule state not found": [""], "Report schedule unexpected error": [""], "Changing this report is forbidden": [""], @@ -526,7 +526,7 @@ ], "Request Permissions": [""], "Cancel": [""], - "Use the edit buttom to change this field": [""], + "Use the edit button to change this field": [""], "Test Connection": [""], "[Superset] Access to the datasource %(name)s was granted": [""], "Unable to find such a holiday: [{}]": [""], diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po index 6faf2b1c7565f..d5b79fd88c25e 100644 --- a/superset/translations/en/LC_MESSAGES/messages.po +++ b/superset/translations/en/LC_MESSAGES/messages.po @@ -7879,7 +7879,7 @@ msgstr "" #: superset/views/core.py:2075 msgid "" "One or more required 
fields are missing in the request. Please try again," -" and if the problem persists conctact your administrator." +" and if the problem persists contact your administrator." msgstr "" #: superset-frontend/src/dashboard/components/SliceHeader/index.tsx:46 @@ -9228,7 +9228,7 @@ msgid "Report Schedule reached a working timeout." msgstr "" #: superset/reports/commands/exceptions.py:226 -msgid "Report Schedule sellenium user not found" +msgid "Report Schedule selenium user not found" msgstr "" #: superset/reports/commands/exceptions.py:230 @@ -13112,7 +13112,7 @@ msgid "" msgstr "" #: superset/templates/superset/fab_overrides/list_with_checkboxes.html:82 -msgid "Use the edit buttom to change this field" +msgid "Use the edit button to change this field" msgstr "" #: superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx:176 diff --git a/superset/translations/messages.pot b/superset/translations/messages.pot index ad475ef1e3572..79e2d0584c351 100644 --- a/superset/translations/messages.pot +++ b/superset/translations/messages.pot @@ -7886,7 +7886,7 @@ msgstr "" #: superset/views/core.py:2075 msgid "" "One or more required fields are missing in the request. Please try again," -" and if the problem persists conctact your administrator." +" and if the problem persists contact your administrator." msgstr "" #: superset-frontend/src/dashboard/components/SliceHeader/index.tsx:46 @@ -9235,7 +9235,7 @@ msgid "Report Schedule reached a working timeout." msgstr "" #: superset/reports/commands/exceptions.py:226 -msgid "Report Schedule sellenium user not found" +msgid "Report Schedule selenium user not found" msgstr "" #: superset/reports/commands/exceptions.py:230 diff --git a/superset/translations/utils.py b/superset/translations/utils.py index 25a698f0e1114..79d01539a16e1 100644 --- a/superset/translations/utils.py +++ b/superset/translations/utils.py @@ -27,7 +27,7 @@ def get_language_pack(locale: str) -> Optional[Dict[str, Any]]: """Get/cache a language pack - Returns the langugage pack from cache if it exists, caches otherwise + Returns the language pack from cache if it exists, caches otherwise >>> get_language_pack('fr')['Dashboards'] "Tableaux de bords" diff --git a/superset/utils/core.py b/superset/utils/core.py index 6f86372f753f6..06f2f63df1797 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -1700,7 +1700,7 @@ def get_column_name_from_metric(metric: Metric) -> Optional[str]: def get_column_names_from_metrics(metrics: List[Metric]) -> List[str]: """ - Extract the columns that a list of metrics are referencing. Expcludes all + Extract the columns that a list of metrics are referencing. Excludes all SQL metrics. :param metrics: Ad-hoc metric diff --git a/superset/utils/date_parser.py b/superset/utils/date_parser.py index 7e79c72f1eb78..72c32bba4e6da 100644 --- a/superset/utils/date_parser.py +++ b/superset/utils/date_parser.py @@ -153,7 +153,7 @@ def get_since_until( # pylint: disable=too-many-arguments,too-many-locals,too-m """Return `since` and `until` date time tuple from string representations of time_range, since, until and time_shift. - This functiom supports both reading the keys separately (from `since` and + This function supports both reading the keys separately (from `since` and `until`), as well as the new `time_range` key. 
Valid formats are: - ISO 8601 diff --git a/superset/utils/pandas_postprocessing/contribution.py b/superset/utils/pandas_postprocessing/contribution.py index 2bfc6f4be698f..f8519f39a9729 100644 --- a/superset/utils/pandas_postprocessing/contribution.py +++ b/superset/utils/pandas_postprocessing/contribution.py @@ -35,7 +35,7 @@ def contribution( rename_columns: Optional[List[str]] = None, ) -> DataFrame: """ - Calculate cell contibution to row/column total for numeric columns. + Calculate cell contribution to row/column total for numeric columns. Non-numeric columns will be kept untouched. If `columns` are specified, only calculate contributions on selected columns. diff --git a/superset/utils/pandas_postprocessing/prophet.py b/superset/utils/pandas_postprocessing/prophet.py index d66298b1790cc..6d733296adf54 100644 --- a/superset/utils/pandas_postprocessing/prophet.py +++ b/superset/utils/pandas_postprocessing/prophet.py @@ -120,7 +120,7 @@ def prophet( # pylint: disable=too-many-arguments ) ) freq = PROPHET_TIME_GRAIN_MAP[time_grain] - # check type at runtime due to marhsmallow schema not being able to handle + # check type at runtime due to marshmallow schema not being able to handle # union types if not isinstance(periods, int) or periods < 0: raise InvalidPostProcessingError(_("Periods must be a whole number")) diff --git a/superset/views/base_schemas.py b/superset/views/base_schemas.py index 778f737fe09cb..8f4ed7735cc06 100644 --- a/superset/views/base_schemas.py +++ b/superset/views/base_schemas.py @@ -40,7 +40,7 @@ def validate_owner(value: int) -> None: class BaseSupersetSchema(Schema): """ Extends Marshmallow schema so that we can pass a Model to load - (following marshamallow-sqlalchemy pattern). This is useful + (following marshmallow-sqlalchemy pattern). This is useful to perform partial model merges on HTTP PUT """ diff --git a/superset/views/core.py b/superset/views/core.py index d3dfdb017cb48..7ee053e881c0e 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -1977,7 +1977,7 @@ def sqllab_viz(self) -> FlaskResponse: # pylint: disable=no-self-use raise SupersetGenericErrorException( __( "One or more required fields are missing in the request. Please try " - "again, and if the problem persists conctact your administrator." + "again, and if the problem persists contact your administrator." ), status=400, ) from ex diff --git a/superset/views/database/views.py b/superset/views/database/views.py index 5f94fe6fc8737..037128ee16592 100644 --- a/superset/views/database/views.py +++ b/superset/views/database/views.py @@ -232,10 +232,10 @@ def form_post(self, form: CsvToDatabaseForm) -> Response: # Connect table to the database that should be used for exploration. # E.g. if hive was used to upload a csv, presto will be a better option # to explore the table. 
- expore_database = database + explore_database = database explore_database_id = database.explore_database_id if explore_database_id: - expore_database = ( + explore_database = ( db.session.query(models.Database) .filter_by(id=explore_database_id) .one_or_none() @@ -247,7 +247,7 @@ def form_post(self, form: CsvToDatabaseForm) -> Response: .filter_by( table_name=csv_table.table, schema=csv_table.schema, - database_id=expore_database.id, + database_id=explore_database.id, ) .one_or_none() ) @@ -256,7 +256,7 @@ def form_post(self, form: CsvToDatabaseForm) -> Response: sqla_table.fetch_metadata() if not sqla_table: sqla_table = SqlaTable(table_name=csv_table.table) - sqla_table.database = expore_database + sqla_table.database = explore_database sqla_table.database_id = database.id sqla_table.owners = [g.user] sqla_table.schema = csv_table.schema @@ -369,10 +369,10 @@ def form_post(self, form: ExcelToDatabaseForm) -> Response: # Connect table to the database that should be used for exploration. # E.g. if hive was used to upload a excel, presto will be a better option # to explore the table. - expore_database = database + explore_database = database explore_database_id = database.explore_database_id if explore_database_id: - expore_database = ( + explore_database = ( db.session.query(models.Database) .filter_by(id=explore_database_id) .one_or_none() @@ -384,7 +384,7 @@ def form_post(self, form: ExcelToDatabaseForm) -> Response: .filter_by( table_name=excel_table.table, schema=excel_table.schema, - database_id=expore_database.id, + database_id=explore_database.id, ) .one_or_none() ) @@ -393,7 +393,7 @@ def form_post(self, form: ExcelToDatabaseForm) -> Response: sqla_table.fetch_metadata() if not sqla_table: sqla_table = SqlaTable(table_name=excel_table.table) - sqla_table.database = expore_database + sqla_table.database = explore_database sqla_table.database_id = database.id sqla_table.owners = [g.user] sqla_table.schema = excel_table.schema @@ -510,10 +510,10 @@ def form_post( # pylint: disable=too-many-locals # Connect table to the database that should be used for exploration. # E.g. if hive was used to upload a csv, presto will be a better option # to explore the table. - expore_database = database + explore_database = database explore_database_id = database.explore_database_id if explore_database_id: - expore_database = ( + explore_database = ( db.session.query(models.Database) .filter_by(id=explore_database_id) .one_or_none() @@ -525,7 +525,7 @@ def form_post( # pylint: disable=too-many-locals .filter_by( table_name=columnar_table.table, schema=columnar_table.schema, - database_id=expore_database.id, + database_id=explore_database.id, ) .one_or_none() ) @@ -534,7 +534,7 @@ def form_post( # pylint: disable=too-many-locals sqla_table.fetch_metadata() if not sqla_table: sqla_table = SqlaTable(table_name=columnar_table.table) - sqla_table.database = expore_database + sqla_table.database = explore_database sqla_table.database_id = database.id sqla_table.owners = [g.user] sqla_table.schema = columnar_table.schema diff --git a/superset/viz.py b/superset/viz.py index 1f4c795325b4b..d8f0dc342b127 100644 --- a/superset/viz.py +++ b/superset/viz.py @@ -454,7 +454,7 @@ def cache_key(self, query_obj: QueryObjectDict, **extra: Any) -> str: "5 days ago" or "now"). 
The `extra` arguments are currently used by time shift queries, since - different time shifts wil differ only in the `from_dttm`, `to_dttm`, + different time shifts will differ only in the `from_dttm`, `to_dttm`, `inner_from_dttm`, and `inner_to_dttm` values which are stripped. """ cache_dict = copy.copy(query_obj) @@ -1708,9 +1708,9 @@ def get_data(self, df: pd.DataFrame) -> VizData: values=utils.get_metric_name(self.form_data["metric"]), ) chart_data = self.to_series(df) - for serie in chart_data: - serie["rank"] = rank_lookup[serie["key"]] - serie["perc"] = 1 - (serie["rank"] / (max_rank + 1)) + for series in chart_data: + series["rank"] = rank_lookup[series["key"]] + series["perc"] = 1 - (series["rank"] / (max_rank + 1)) return chart_data @@ -2020,7 +2020,7 @@ def get_data(self, df: pd.DataFrame) -> VizData: df.columns = ["source", "target", "value"] - # Preparing a symetrical matrix like d3.chords calls for + # Preparing a symmetrical matrix like d3.chords calls for nodes = list(set(df["source"]) | set(df["target"])) matrix = {} for source, target in product(nodes, nodes): diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py index 5c132381b1930..0ea5bb5106b15 100644 --- a/tests/integration_tests/conftest.py +++ b/tests/integration_tests/conftest.py @@ -134,7 +134,7 @@ def setup_sample_data() -> Any: yield with app.app_context(): - # drop sqlachemy tables + # drop sqlalchemy tables db.session.commit() from sqlalchemy.ext import declarative diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index f4968edae9e7f..b015e4c59bbe7 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -884,9 +884,9 @@ def test_create_database_uri_validate(self): "superset.views.core.app.config", {**app.config, "PREVENT_UNSAFE_DB_CONNECTIONS": True}, ) - def test_create_database_fail_sqllite(self): + def test_create_database_fail_sqlite(self): """ - Database API: Test create fail with sqllite + Database API: Test create fail with sqlite """ database_data = { "database_name": "test-create-sqlite-database", @@ -1378,7 +1378,7 @@ def test_get_select_star_not_found_table(self): """ self.login(username="admin") example_db = get_example_database() - # sqllite will not raise a NoSuchTableError + # sqlite will not raise a NoSuchTableError if example_db.backend == "sqlite": return uri = f"api/v1/database/{example_db.id}/select_star/table_does_not_exist/" diff --git a/tests/integration_tests/event_logger_tests.py b/tests/integration_tests/event_logger_tests.py index 4553bb9dc789b..fa965ebd7d208 100644 --- a/tests/integration_tests/event_logger_tests.py +++ b/tests/integration_tests/event_logger_tests.py @@ -179,7 +179,7 @@ def log( duration=timedelta(days=64, seconds=29156, microseconds=10), object_ref={"baz": "food"}, log_to_statsd=False, - payload_override={"engine": "sqllite"}, + payload_override={"engine": "sqlite"}, ) assert logger.records == [ @@ -188,7 +188,7 @@ def log( { "path": "/", "object_ref": {"baz": "food"}, - "payload_override": {"engine": "sqllite"}, + "payload_override": {"engine": "sqlite"}, } ], "user_id": 2, @@ -226,7 +226,7 @@ def log( duration=timedelta(days=64, seconds=29156, microseconds=10), object_ref={"baz": "food"}, log_to_statsd=False, - payload_override={"engine": "sqllite"}, + payload_override={"engine": "sqlite"}, ) assert logger.records[0]["user_id"] == None
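
A note on the config.py hunk above: the comment it corrects describes the SQLALCHEMY_CUSTOM_PASSWORD_STORE hook, a function that takes a single 'sqla.engine.url' argument and returns a password. A minimal sketch of such a hook, assuming the secret is resolved from environment variables (the variable-naming scheme here is illustrative, not part of Superset):

import os

from sqlalchemy.engine.url import URL


def lookup_password(url: URL) -> str:
    # Resolve the secret outside the DB URI, e.g. from the environment,
    # keyed on the connection's host and database (illustrative scheme only).
    return os.environ[f"DB_PASSWORD_{url.host}_{url.database}".upper()]


# then, in superset_config.py:
# SQLALCHEMY_CUSTOM_PASSWORD_STORE = lookup_password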
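On the date_parser.py hunk: the docstring it corrects says get_since_until resolves the legacy `since`/`until` keys and the newer `time_range` key into a (since, until) datetime tuple, accepting ISO 8601 and human-readable inputs. A hedged usage sketch; only the keyword names visible in the patched code (time_range, since, until, time_shift) are assumed:

from superset.utils.date_parser import get_since_until

# newer single-key form: a named range or "<since> : <until>"
since, until = get_since_until(time_range="Last week")
since, until = get_since_until(time_range="2021-01-01 : 2021-02-01")

# legacy separate keys, with the whole window shifted back one week
since, until = get_since_until(since="7 days ago", until="now", time_shift="1 week")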
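On the contribution.py hunk: the corrected docstring describes replacing each numeric cell with its share of the row/column total while leaving non-numeric columns untouched. A standalone pandas sketch of the column-total case, not Superset's exact signature:

import pandas as pd


def contribution_by_column(df: pd.DataFrame) -> pd.DataFrame:
    numeric = df.select_dtypes("number")
    out = df.copy()
    # divide each cell by its column total, so every numeric column sums to 1
    out[numeric.columns] = numeric / numeric.sum(axis=0)
    return out


df = pd.DataFrame({"region": ["a", "b"], "sales": [30.0, 70.0]})
print(contribution_by_column(df))  # sales -> 0.3 and 0.7; region untouched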
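The query_object.py and viz.py hunks both touch cache-key construction: volatile keys (from_dttm, to_dttm, and the time-shift variants) are stripped from a copy of the query dict before hashing, so equivalent queries share a cache entry and later removing a key does not invalidate previously cached results. A generic sketch of the pattern; the md5-over-sorted-JSON detail is an assumption here, not a claim about Superset's exact code:

import hashlib
import json


def cache_key(query_obj: dict, *extra_volatile: str) -> str:
    cache_dict = dict(query_obj)  # never mutate the caller's query object
    for key in ("from_dttm", "to_dttm", *extra_volatile):
        cache_dict.pop(key, None)  # strip per-request, time-dependent values
    serialized = json.dumps(cache_dict, sort_keys=True, default=str)
    return hashlib.md5(serialized.encode("utf-8")).hexdigest()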
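The models.py and helpers.py hunks rename allows_hidden_ordeby_agg to allows_hidden_orderby_agg; when the flag is False (as hive.py sets it in this patch), the ORDER BY expressions are appended to the SELECT list and then de-duplicated, since such engines require every ORDER BY column to be present in SELECT when aggregating. An order-preserving de-duplication sketch in the spirit of the remove_duplicates helper those call sites use (the real helper lives in superset.utils.core and, as I recall, also accepts a key function for unhashable items):

from typing import Iterable, List, TypeVar

T = TypeVar("T")


def remove_duplicates(items: Iterable[T]) -> List[T]:
    seen = set()
    out: List[T] = []
    for item in items:
        if item not in seen:  # keep the first occurrence, preserving order
            seen.add(item)
            out.append(item)
    return out


# select_exprs = remove_duplicates(select_exprs + orderby_exprs)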
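Finally, on the viz.py chord-diagram hunk: it builds the symmetrical matrix d3.chords expects by iterating product(nodes, nodes) over the union of sources and targets. A compact sketch of the same idea; zero-filling absent (source, target) pairs is the assumption that makes the matrix square:

from itertools import product

import pandas as pd

df = pd.DataFrame({"source": ["a", "a"], "target": ["b", "c"], "value": [1, 2]})
values = df.set_index(["source", "target"])["value"]

nodes = sorted(set(df["source"]) | set(df["target"]))
matrix = {
    (s, t): values.get((s, t), 0)  # 0 where the dataframe records no flow
    for s, t in product(nodes, nodes)
}
m = [[matrix[(s, t)] for t in nodes] for s in nodes]  # N x N rows for d3.chords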