mirror of
https://github.com/apache/superset.git
synced 2026-04-19 08:04:53 +00:00
Fix lint in superset/db_engine_spec (#8338)
* Enable lint checking for files in db_engine_spec that have few to no lint issues
* Enable lint and fix issue in db_engine_spec/mysql.py
* Enable pylint and fix lint for db_engine_spec/pinot.py
* Enable lint and fix issues for db_engine_specs/hive.py
* Enable lint and fix for db_engine_spec/presto.py
* Re-enable lint on base.py, fix/disable specific failures, including one bad method signature
* Make flake8 happy after a number of pylint fixes
* Update db_engine_spec_test test cases related to Presto to support different method naming
* automated reformatting
* One more pylint disable for druid.py
* Find the magic invocation that makes all the lint tools happy
This commit is contained in:
committed by
Beto Dealmeida
parent
65a05ca47e
commit
ec86d9de17
@@ -14,7 +14,6 @@
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# pylint: disable=C,R,W
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import os
|
||||
@@ -81,9 +80,9 @@ class HiveEngineSpec(PrestoEngineSpec):
|
||||
|
||||
@classmethod
|
||||
def get_all_datasource_names(
|
||||
cls, db, datasource_type: str
|
||||
cls, database, datasource_type: str
|
||||
) -> List[utils.DatasourceName]:
|
||||
return BaseEngineSpec.get_all_datasource_names(db, datasource_type)
|
||||
return BaseEngineSpec.get_all_datasource_names(database, datasource_type)
|
||||
|
||||
@classmethod
|
||||
def fetch_data(cls, cursor, limit: int) -> List[Tuple]:
|
||||
@@ -99,7 +98,7 @@ class HiveEngineSpec(PrestoEngineSpec):
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
def create_table_from_csv(cls, form, table):
|
||||
def create_table_from_csv(cls, form, table): # pylint: disable=too-many-locals
|
||||
"""Uploads a csv file and creates a superset datasource in Hive."""
|
||||
|
||||
def convert_to_hive_type(col_type):
|
||||
@@ -223,7 +222,7 @@ class HiveEngineSpec(PrestoEngineSpec):
|
||||
reduce_progress = int(match.groupdict()["reduce_progress"])
|
||||
stages[stage_number] = (map_progress + reduce_progress) / 2
|
||||
logging.info(
|
||||
"Progress detail: {}, "
|
||||
"Progress detail: {}, " # pylint: disable=logging-format-interpolation
|
||||
"current job {}, "
|
||||
"total jobs: {}".format(stages, current_job, total_jobs)
|
||||
)
|
||||
@@ -239,9 +238,10 @@ class HiveEngineSpec(PrestoEngineSpec):
|
||||
for line in log_lines:
|
||||
if lkp in line:
|
||||
return line.split(lkp)[1]
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def handle_cursor(cls, cursor, query, session):
|
||||
def handle_cursor(cls, cursor, query, session): # pylint: disable=too-many-locals
|
||||
"""Updates progress information"""
|
||||
from pyhive import hive # pylint: disable=no-name-in-module
|
||||
|
||||
@@ -302,33 +302,33 @@ class HiveEngineSpec(PrestoEngineSpec):
|
||||
return inspector.get_columns(table_name, schema)
|
||||
|
||||
@classmethod
|
||||
def where_latest_partition(
|
||||
def where_latest_partition( # pylint: disable=too-many-arguments
|
||||
cls,
|
||||
table_name: str,
|
||||
schema: Optional[str],
|
||||
database,
|
||||
qry: Select,
|
||||
query: Select,
|
||||
columns: Optional[List] = None,
|
||||
) -> Optional[Select]:
|
||||
try:
|
||||
col_names, values = cls.latest_partition(
|
||||
table_name, schema, database, show_first=True
|
||||
)
|
||||
except Exception:
|
||||
except Exception: # pylint: disable=broad-except
|
||||
# table is not partitioned
|
||||
return None
|
||||
if values is not None and columns is not None:
|
||||
for col_name, value in zip(col_names, values):
|
||||
for c in columns:
|
||||
if c.get("name") == col_name:
|
||||
qry = qry.where(Column(col_name) == value)
|
||||
for clm in columns:
|
||||
if clm.get("name") == col_name:
|
||||
query = query.where(Column(col_name) == value)
|
||||
|
||||
return qry
|
||||
return query
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _get_fields(cls, cols: List[dict]) -> List[ColumnClause]:
|
||||
return BaseEngineSpec._get_fields(cols)
|
||||
return BaseEngineSpec._get_fields(cols) # pylint: disable=protected-access
|
||||
|
||||
@classmethod
|
||||
def latest_sub_partition(cls, table_name, schema, database, **kwargs):
|
||||
@@ -343,11 +343,13 @@ class HiveEngineSpec(PrestoEngineSpec):
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _partition_query(cls, table_name, limit=0, order_by=None, filters=None):
|
||||
def _partition_query( # pylint: disable=too-many-arguments
|
||||
cls, table_name, database, limit=0, order_by=None, filters=None
|
||||
):
|
||||
return f"SHOW PARTITIONS {table_name}"
|
||||
|
||||
@classmethod
|
||||
def select_star(
|
||||
def select_star( # pylint: disable=too-many-arguments
|
||||
cls,
|
||||
database,
|
||||
table_name: str,
|
||||
@@ -413,6 +415,8 @@ class HiveEngineSpec(PrestoEngineSpec):
|
||||
return configuration
|
||||
|
||||
@staticmethod
|
||||
def execute(cursor, query: str, async_: bool = False):
|
||||
def execute(
|
||||
cursor, query: str, async_: bool = False
|
||||
): # pylint: disable=arguments-differ
|
||||
kwargs = {"async": async_}
|
||||
cursor.execute(query, **kwargs)
|
||||
|
||||
Reference in New Issue
Block a user