Moving away from using the root logger everywhere (#9099)

* Moving away from using the root logger everywhere

* self.logger -> logger
This commit was authored by Craig Rueda on 2020-02-07 at 23:38:48 -08:00 and committed via GitHub.
parent 3cbe228dc1
commit 607cfd1f29
31 changed files with 213 additions and 169 deletions

View File

@@ -39,6 +39,7 @@ if TYPE_CHECKING:
QueryStatus = utils.QueryStatus
config = app.config
logger = logging.getLogger(__name__)
tracking_url_trans = conf.get("TRACKING_URL_TRANSFORMER")
hive_poll_interval = conf.get("HIVE_POLL_INTERVAL")
@@ -118,7 +119,7 @@ class HiveEngineSpec(PrestoEngineSpec):
bucket_path = config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"]
if not bucket_path:
logging.info("No upload bucket specified")
logger.info("No upload bucket specified")
raise Exception(
"No upload bucket specified. You can specify one in the config file."
)
@@ -224,7 +225,7 @@ class HiveEngineSpec(PrestoEngineSpec):
map_progress = int(match.groupdict()["map_progress"])
reduce_progress = int(match.groupdict()["reduce_progress"])
stages[stage_number] = (map_progress + reduce_progress) / 2
logging.info(
logger.info(
"Progress detail: {}, " # pylint: disable=logging-format-interpolation
"current job {}, "
"total jobs: {}".format(stages, current_job, total_jobs)
@@ -267,7 +268,7 @@ class HiveEngineSpec(PrestoEngineSpec):
if log:
log_lines = log.splitlines()
progress = cls.progress(log_lines)
logging.info(f"Query {query_id}: Progress total: {progress}")
logger.info(f"Query {query_id}: Progress total: {progress}")
needs_commit = False
if progress > query.progress:
query.progress = progress
@@ -276,22 +277,22 @@ class HiveEngineSpec(PrestoEngineSpec):
tracking_url = cls.get_tracking_url(log_lines)
if tracking_url:
job_id = tracking_url.split("/")[-2]
logging.info(
logger.info(
f"Query {query_id}: Found the tracking url: {tracking_url}"
)
tracking_url = tracking_url_trans(tracking_url)
logging.info(
logger.info(
f"Query {query_id}: Transformation applied: {tracking_url}"
)
query.tracking_url = tracking_url
logging.info(f"Query {query_id}: Job id: {job_id}")
logger.info(f"Query {query_id}: Job id: {job_id}")
needs_commit = True
if job_id and len(log_lines) > last_log_line:
# Wait for job id before logging things out
# this allows for prefixing all log lines and becoming
# searchable in something like Kibana
for l in log_lines[last_log_line:]:
logging.info(f"Query {query_id}: [{job_id}] {l}")
logger.info(f"Query {query_id}: [{job_id}] {l}")
last_log_line = len(log_lines)
if needs_commit:
session.commit()