Commit 8f68421

feat: support python 3.5
dungdm93 committed Apr 26, 2021
1 parent 6f5072f commit 8f68421
Showing 3 changed files with 22 additions and 20 deletions.
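
Every hunk below applies the same two rewrites: Python 3.5 predates f-strings (PEP 498, added in Python 3.6) and variable annotations (PEP 526, also 3.6), so each f-string becomes a str.format() call and each annotated assignment becomes a type comment. A minimal sketch of the two substitutions, with illustrative names not taken from the diff:

    # Python 3.6+ only: f-string and variable annotation
    count: int = 0
    message = f"unknown type '{type_name}'"

    # Python 3.5 compatible: type comment and str.format()
    count = 0  # type: int
    message = "unknown type '{type_name}'".format(type_name=type_name)
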
2 changes: 1 addition & 1 deletion trino/sqlalchemy/compiler.py
@@ -103,7 +103,7 @@ def visit_FLOAT(self, type_, **kw):
         elif 32 < precision <= 64:
             return self.visit_DOUBLE(type_, **kw)
         else:
-            raise ValueError(f"type.precision={type_.precision} is invalid")
+            raise ValueError("type.precision={precision} is invalid".format(precision=type_.precision))
 
     def visit_DOUBLE(self, type_, **kw):
         return "DOUBLE"
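
For context on the branch being touched: visit_FLOAT dispatches on the declared precision, with precisions of 33 through 64 compiling to DOUBLE and anything larger rejected; the precision <= 32 branch sits above this excerpt and presumably emits a narrower type such as REAL. Roughly, under that assumption:

    Float(precision=24)  # branch not shown; assumed to compile to REAL
    Float(precision=53)  # compiles to DOUBLE
    Float(precision=80)  # raises ValueError: type.precision=80 is invalid
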
6 changes: 3 additions & 3 deletions trino/sqlalchemy/datatype.py
@@ -134,7 +134,7 @@ def parse_sqltype(type_str: str) -> TypeEngine:
     type_str = type_str.strip().lower()
     match = re.match(r'^(?P<type>\w+)\s*(?:\((?P<options>.*)\))?', type_str)
     if not match:
-        util.warn(f"Could not parse type name '{type_str}'")
+        util.warn("Could not parse type name '{type_str}'".format(type_str=type_str))
         return sqltypes.NULLTYPE
     type_name = match.group("type")
     type_opts = match.group("options")
@@ -151,15 +151,15 @@ def parse_sqltype(type_str: str) -> TypeEngine:
         value_type = parse_sqltype(value_type_str)
         return MAP(key_type, value_type)
     elif type_name == "row":
-        attr_types: Dict[str, SQLType] = {}
+        attr_types = {}  # type: Dict[str, SQLType]
         for attr_str in split(type_opts):
             name, attr_type_str = split(attr_str.strip(), delimiter=' ')
             attr_type = parse_sqltype(attr_type_str)
             attr_types[name] = attr_type
         return ROW(attr_types)
 
     if type_name not in _type_map:
-        util.warn(f"Did not recognize type '{type_name}'")
+        util.warn("Did not recognize type '{type_name}'".format(type_name=type_name))
         return sqltypes.NULLTYPE
     type_class = _type_map[type_name]
     type_args = [int(o.strip()) for o in type_opts.split(',')] if type_opts else []
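
parse_sqltype recursively maps Trino type strings onto SQLAlchemy type objects. A hypothetical round trip for illustration; the import path follows this file's location, and the results are inferred from the code above:

    from trino.sqlalchemy.datatype import parse_sqltype

    parse_sqltype("varchar(32)")               # _type_map lookup plus int args -> VARCHAR(32)
    parse_sqltype("map(varchar, integer)")     # -> MAP(VARCHAR, INTEGER)
    parse_sqltype("row(x integer, y double)")  # -> ROW({'x': INTEGER, 'y': DOUBLE})
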
34 changes: 18 additions & 16 deletions trino/sqlalchemy/dialect.py
@@ -14,13 +14,12 @@
 
 from sqlalchemy import exc, sql
 from sqlalchemy.engine.base import Connection
-from sqlalchemy.engine.default import DefaultDialect
+from sqlalchemy.engine.default import DefaultDialect, DefaultExecutionContext
 from sqlalchemy.engine.url import URL
 
 from trino import dbapi as trino_dbapi
 from trino.auth import BasicAuthentication
 from trino.dbapi import Cursor
-
 from . import compiler, datatype, error
 
 
@@ -69,7 +68,7 @@ def create_connect_args(self, url: URL) -> Tuple[List[Any], Dict[str, Any]]:
             kwargs['catalog'] = db_parts[0]
             kwargs['schema'] = db_parts[1]
         else:
-            raise ValueError(f'Unexpected database format {url.database}')
+            raise ValueError('Unexpected database format {database}'.format(database=url.database))
 
         username = kwargs.pop('username', 'anonymous')
         kwargs['user'] = username
@@ -84,7 +83,8 @@ def create_connect_args(self, url: URL) -> Tuple[List[Any], Dict[str, Any]]:
     def get_columns(self, connection: Connection,
                     table_name: str, schema: str = None, **kw) -> List[Dict[str, Any]]:
         if not self.has_table(connection, table_name, schema):
-            raise exc.NoSuchTableError(f'schema={schema}, table={table_name}')
+            raise exc.NoSuchTableError(
+                'schema={schema}, table={table_name}'.format(schema=schema, table_name=table_name))
         return self._get_columns(connection, table_name, schema, **kw)
 
     def _get_columns(self, connection: Connection,
@@ -135,7 +135,7 @@ def get_schema_names(self, connection: Connection, **kw) -> List[str]:
     def get_table_names(self, connection: Connection, schema: str = None, **kw) -> List[str]:
         query = 'SHOW TABLES'
         if schema:
-            query = f'{query} FROM {self.identifier_preparer.quote_identifier(schema)}'
+            query += ' FROM ' + self.identifier_preparer.quote_identifier(schema)
         res = connection.execute(sql.text(query))
         return [row.Table for row in res]
 
@@ -161,7 +161,7 @@ def get_temp_view_names(self, connection: Connection, schema: str = None, **kw)
 
     def get_view_definition(self, connection: Connection, view_name: str, schema: str = None, **kw) -> str:
         full_view = self._get_full_table(view_name, schema)
-        query = f'SHOW CREATE VIEW {full_view}'
+        query = 'SHOW CREATE VIEW {full_view}'.format(full_view=full_view)
         try:
             res = connection.execute(sql.text(query))
             return res.scalar()
@@ -177,9 +177,11 @@ def get_view_definition(self, connection: Connection, view_name: str, schema: str = None, **kw) -> str:
     def get_indexes(self, connection: Connection,
                     table_name: str, schema: str = None, **kw) -> List[Dict[str, Any]]:
         if not self.has_table(connection, table_name, schema):
-            raise exc.NoSuchTableError(f'schema={schema}, table={table_name}')
+            raise exc.NoSuchTableError(
+                'schema={schema}, table={table_name}'.format(schema=schema, table_name=table_name))
 
-        partitioned_columns = self._get_columns(connection, f'{table_name}$partitions', schema, **kw)
+        partitions_table = '{table_name}$partitions'.format(table_name=table_name)
+        partitioned_columns = self._get_columns(connection, partitions_table, schema, **kw)
         partition_index = dict(
             name='partition',
             column_names=[col['name'] for col in partitioned_columns],
@@ -199,8 +201,8 @@ def get_check_constraints(self, connection: Connection,
 
     def get_table_comment(self, connection: Connection,
                           table_name: str, schema: str = None, **kw) -> Dict[str, Any]:
-        properties_table = self._get_full_table(f"{table_name}$properties", schema)
-        query = f'SELECT "comment" FROM {properties_table}'
+        properties_table = self._get_full_table("{table_name}$properties".format(table_name=table_name), schema)
+        query = 'SELECT "comment" FROM {properties_table}'.format(properties_table=properties_table)
         try:
             res = connection.execute(sql.text(query))
             return dict(text=res.scalar())
@@ -214,7 +216,7 @@ def get_table_comment(self, connection: Connection,
             raise
 
     def has_schema(self, connection: Connection, schema: str) -> bool:
-        query = f"SHOW SCHEMAS LIKE '{schema}'"
+        query = "SHOW SCHEMAS LIKE '{schema}'".format(schema=schema)
         try:
             res = connection.execute(sql.text(query))
             return res.first() is not None
@@ -231,8 +233,8 @@ def has_table(self, connection: Connection,
                   table_name: str, schema: str = None) -> bool:
         query = 'SHOW TABLES'
         if schema:
-            query = f'{query} FROM {self.identifier_preparer.quote_identifier(schema)}'
-        query = f"{query} LIKE '{table_name}'"
+            query += ' FROM ' + self.identifier_preparer.quote_identifier(schema)
+        query += " LIKE '{table_name}'".format(table_name=table_name)
         try:
             res = connection.execute(sql.text(query))
             return res.first() is not None
@@ -258,7 +260,7 @@ def _get_server_version_info(self, connection: Connection) -> Tuple[int, ...]:
         return tuple([version])
 
     def _get_default_schema_name(self, connection: Connection) -> Optional[str]:
-        dbapi_connection: trino_dbapi.Connection = connection.connection
+        dbapi_connection = connection.connection  # type: trino_dbapi.Connection
         return dbapi_connection.schema
 
     def do_execute(self, cursor: Cursor, statement: str, parameters: Tuple[Any, ...],
@@ -268,7 +270,7 @@ def do_execute(self, cursor: Cursor, statement: str, parameters: Tuple[Any, ...],
         # SQL statement only submitted to Trino server when cursor.fetch*() is called.
         # For DDL (CREATE/ALTER/DROP) and DML (INSERT/UPDATE/DELETE) statement, call cursor.description
         # to force submit statement immediately.
-        cursor.description
+        cursor.description  # noqa
 
     def do_rollback(self, dbapi_connection):
         if dbapi_connection.transaction is not None:
@@ -306,6 +308,6 @@ def _get_full_table(self, table_name: str, schema: str = None, quote: bool = True) -> str:
         table_part = self.identifier_preparer.quote_identifier(table_name) if quote else table_name
         if schema:
             schema_part = self.identifier_preparer.quote_identifier(schema) if quote else schema
-            return f'{schema_part}.{table_part}'
+            return '{schema_part}.{table_part}'.format(schema_part=schema_part, table_part=table_part)
 
         return table_part
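A behavioral note on the do_execute hunk: per its own comment, the client only submits a statement to the Trino server when results or metadata are requested, so reading cursor.description is what forces a DDL/DML statement to execute. A minimal sketch against the raw DBAPI, with placeholder host, port, user, and table:

    from trino import dbapi as trino_dbapi

    conn = trino_dbapi.connect(host="localhost", port=8080, user="anonymous")
    cur = conn.cursor()
    cur.execute("CREATE TABLE memory.default.t (x integer)")  # not yet submitted
    cur.description  # forces submission, so the DDL runs server-side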