Skip to content

Commit

Permalink
DEV-10756: Remove unused imports flagged by flake8
Browse files Browse the repository at this point in the history
  • Loading branch information
ayubshahab committed Mar 18, 2024
1 parent 6a0e203 commit 602c120
Show file tree
Hide file tree
Showing 6 changed files with 6 additions and 13 deletions.
3 changes: 1 addition & 2 deletions usaspending_api/common/helpers/download_csv_strategies.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,6 @@ def download_to_csv(
# we do not want to force all containers where
# other strategies run to have pyspark installed when the strategy
# doesn't require it.
from pyspark.sql import SparkSession
from usaspending_api.common.etl.spark import hadoop_copy_merge, write_csv_file
from usaspending_api.common.helpers.spark_helpers import configure_spark_session, get_active_spark_session

Expand All @@ -149,7 +148,7 @@ def download_to_csv(
self.spark_created_by_command = False
if not self.spark:
self.spark_created_by_command = True
self.spark = configure_spark_session(**extra_conf, spark_context=self.spark) # type: SparkSession
self.spark = configure_spark_session(**extra_conf, spark_context=self.spark)
if source_df is not None:
df = source_df
else:
Expand Down
3 changes: 1 addition & 2 deletions usaspending_api/common/helpers/spark_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
from pyspark.java_gateway import launch_gateway
from pyspark.serializers import read_int, UTF8Deserializer
from pyspark.sql import SparkSession
from pyspark.sql.conf import RuntimeConfig
from typing import Sequence, Set

from usaspending_api.awards.delta_models.awards import AWARDS_COLUMNS
Expand Down Expand Up @@ -247,7 +246,7 @@ def configure_spark_session(
# Now that the SparkSession was created, check whether certain provided config values were ignored if given a
# pre-existing SparkContext, and error-out if so
if spark_context:
built_conf = spark.conf # type: RuntimeConfig
built_conf = spark.conf
provided_conf_keys = [item[0] for item in conf.getAll()]
non_modifiable_conf = [k for k in provided_conf_keys if not built_conf.isModifiable(k)]
if non_modifiable_conf:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from django.core.management.base import BaseCommand
from pyspark.sql import SparkSession

from usaspending_api.config import CONFIG
from usaspending_api.common.helpers.spark_helpers import (
Expand Down Expand Up @@ -64,7 +63,7 @@ def handle(self, *args, **options):
spark_created_by_command = False
if not spark:
spark_created_by_command = True
spark = configure_spark_session(**extra_conf, spark_context=spark) # type: SparkSession
spark = configure_spark_session(**extra_conf, spark_context=spark)

# Setup Logger
logger = get_jvm_logger(spark)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import logging

from pyspark.sql import SparkSession

from usaspending_api.common.helpers.spark_helpers import get_active_spark_session, configure_spark_session
from usaspending_api.etl.management.commands.elasticsearch_indexer import AbstractElasticsearchIndexer
Expand Down Expand Up @@ -35,6 +34,6 @@ def create_controller(self, config: dict) -> AbstractElasticsearchIndexerControl
spark = get_active_spark_session()
if not spark:
spark_created_by_command = True
spark = configure_spark_session(**extra_conf, spark_context=spark) # type: SparkSession
spark = configure_spark_session(**extra_conf, spark_context=spark)

return DeltaLakeElasticsearchIndexerController(config, spark, spark_created_by_command)
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from django.core.management import BaseCommand
from pyspark.sql import SparkSession

from usaspending_api.awards.delta_models import (
AWARDS_COLUMNS,
Expand Down Expand Up @@ -351,8 +350,7 @@ def handle(self, *args, **options):
spark_created_by_command = False
if not spark:
spark_created_by_command = True
spark = configure_spark_session(**extra_conf, spark_context=spark) # type: SparkSession

spark = configure_spark_session(**extra_conf, spark_context=spark)
# Setup Logger
logger = get_jvm_logger(spark)

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from datetime import datetime

from django.core.management import BaseCommand, call_command
from pyspark.sql import SparkSession

from usaspending_api.broker.helpers.last_load_date import get_last_load_date, update_last_load_date
from usaspending_api.common.etl.spark import create_ref_temp_views
Expand Down Expand Up @@ -49,7 +48,7 @@ def handle(self, *args, **options):
spark_created_by_command = False
if not spark:
spark_created_by_command = True
spark = configure_spark_session(**extra_conf, spark_context=spark) # type: SparkSession
spark = configure_spark_session(**extra_conf, spark_context=spark)

# Setup Logger
logger = get_jvm_logger(spark)
Expand Down

0 comments on commit 602c120

Please sign in to comment.