Skip to content

Commit

Permalink
Fixed failing static checks
Browse files Browse the repository at this point in the history
For some reason apache#43077 passed but main was failing. This PR fixes it.
  • Loading branch information
kaxil committed Oct 16, 2024
1 parent e78421d commit a993f93
Show file tree
Hide file tree
Showing 319 changed files with 560 additions and 317 deletions.
5 changes: 3 additions & 2 deletions providers/tests/alibaba/cloud/log/test_oss_task_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,14 @@
from unittest.mock import PropertyMock

import pytest
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.db import clear_db_dags, clear_db_runs

from airflow.providers.alibaba.cloud.log.oss_task_handler import OSSTaskHandler
from airflow.utils.state import TaskInstanceState
from airflow.utils.timezone import datetime

from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.db import clear_db_dags, clear_db_runs

pytestmark = pytest.mark.db_test

OSS_TASK_HANDLER_STRING = "airflow.providers.alibaba.cloud.log.oss_task_handler.{}"
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/auth_manager/avp/test_facade.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,15 @@
from unittest.mock import Mock

import pytest
from tests_common.test_utils.config import conf_vars

from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities, get_action_id, get_entity_type
from airflow.providers.amazon.aws.auth_manager.avp.facade import AwsAuthManagerAmazonVerifiedPermissionsFacade
from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
from airflow.utils.helpers import prune_dict

from tests_common.test_utils.config import conf_vars

if TYPE_CHECKING:
from airflow.auth.managers.base_auth_manager import ResourceMethod

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,13 @@
from unittest.mock import ANY, Mock, patch

import pytest
from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
from tests_common.test_utils.config import conf_vars

from airflow.cli import cli_parser
from airflow.providers.amazon.aws.auth_manager.cli.avp_commands import init_avp, update_schema

from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
from tests_common.test_utils.config import conf_vars

mock_boto3 = Mock()

pytestmark = [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import pytest
from flask import Flask

from tests_common.test_utils.compat import ignore_provider_compatibility_error

python3_saml = pytest.importorskip("python3-saml")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,6 @@
import pytest
from flask import Flask, session
from flask_appbuilder.menu import MenuItem
from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.www import check_content_in_response

from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
from airflow.providers.amazon.aws.auth_manager.avp.facade import AwsAuthManagerAmazonVerifiedPermissionsFacade
Expand All @@ -42,6 +39,10 @@
from airflow.www import app as application
from airflow.www.extensions.init_appbuilder import init_appbuilder

from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.www import check_content_in_response

try:
from airflow.auth.managers.models.resource_details import (
AccessView,
Expand Down
5 changes: 3 additions & 2 deletions providers/tests/amazon/aws/auth_manager/views/test_auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,13 @@

import pytest
from flask import session, url_for
from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
from tests_common.test_utils.config import conf_vars

from airflow.exceptions import AirflowException
from airflow.www import app as application

from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
from tests_common.test_utils.config import conf_vars

pytestmark = [
pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Test requires Airflow 2.9+"),
pytest.mark.skip_if_database_isolation_mode,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@
import yaml
from botocore.exceptions import ClientError, NoCredentialsError
from semver import VersionInfo
from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
from tests_common.test_utils.config import conf_vars

from airflow.exceptions import AirflowException
from airflow.executors.base_executor import BaseExecutor
Expand All @@ -48,6 +46,9 @@
from airflow.utils.state import State
from airflow.version import version as airflow_version_str

from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
from tests_common.test_utils.config import conf_vars

airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3]))
ARN1 = "arn1"

Expand Down
7 changes: 4 additions & 3 deletions providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,6 @@
from botocore.exceptions import ClientError
from inflection import camelize
from semver import VersionInfo
from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
from tests_common.test_utils.config import conf_vars

from airflow.exceptions import AirflowException
from airflow.executors.base_executor import BaseExecutor
Expand All @@ -60,6 +57,10 @@
from airflow.utils.timezone import utcnow
from airflow.version import version as airflow_version_str

from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
from tests_common.test_utils.config import conf_vars

pytestmark = pytest.mark.db_test

airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3]))
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/hooks/test_base_aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@
from botocore.utils import FileWebIdentityTokenLoader
from moto import mock_aws
from moto.core import DEFAULT_ACCOUNT_ID
from tests_common.test_utils.config import conf_vars

from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
Expand All @@ -51,6 +50,8 @@
)
from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper

from tests_common.test_utils.config import conf_vars

pytest.importorskip("aiobotocore")

MOCK_AWS_CONN_ID = "mock-conn-id"
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/hooks/test_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import pytest
from botocore.exceptions import ClientError
from moto import mock_aws
from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS

from airflow.exceptions import AirflowException
from airflow.models import Connection
Expand All @@ -44,6 +43,8 @@
)
from airflow.utils.timezone import datetime

from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS


@pytest.fixture
def mocked_s3_res():
Expand Down
5 changes: 3 additions & 2 deletions providers/tests/amazon/aws/links/test_base_aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,14 @@
from unittest import mock

import pytest
from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
from tests_common.test_utils.mock_operators import MockOperator

from airflow.models.xcom import XCom
from airflow.providers.amazon.aws.links.base_aws import BaseAwsLink
from airflow.serialization.serialized_objects import SerializedDAG

from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
from tests_common.test_utils.mock_operators import MockOperator

if TYPE_CHECKING:
from airflow.models.taskinstance import TaskInstance

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
import boto3
import pytest
from moto import mock_aws
from tests_common.test_utils.config import conf_vars
from watchtower import CloudWatchLogHandler

from airflow.models import DAG, DagRun, TaskInstance
Expand All @@ -37,6 +36,8 @@
from airflow.utils.state import State
from airflow.utils.timezone import datetime

from tests_common.test_utils.config import conf_vars


def get_time_str(time_in_milliseconds):
dt_time = dt.fromtimestamp(time_in_milliseconds / 1000.0, tz=timezone.utc)
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/log/test_s3_task_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
import pytest
from botocore.exceptions import ClientError
from moto import mock_aws
from tests_common.test_utils.config import conf_vars

from airflow.models import DAG, DagRun, TaskInstance
from airflow.operators.empty import EmptyOperator
Expand All @@ -35,6 +34,8 @@
from airflow.utils.state import State, TaskInstanceState
from airflow.utils.timezone import datetime

from tests_common.test_utils.config import conf_vars


@pytest.fixture(autouse=True)
def s3mock():
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/secrets/test_systems_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,12 @@

import pytest
from moto import mock_aws
from tests_common.test_utils.config import conf_vars

from airflow.configuration import initialize_secrets_backends
from airflow.providers.amazon.aws.secrets.systems_manager import SystemsManagerParameterStoreBackend

from tests_common.test_utils.config import conf_vars

URI_CONNECTION = pytest.param(
"postgres://my-login:my-pass@my-host:5432/my-schema?param1=val1&param2=val2", id="uri-connection"
)
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/transfers/test_redshift_to_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,14 @@

import pytest
from boto3.session import Session
from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces

from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
from airflow.providers.amazon.aws.utils.redshift import build_credentials_block

from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces


class TestRedshiftToS3Transfer:
@pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]])
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/transfers/test_s3_to_redshift.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@

import pytest
from boto3.session import Session
from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces

from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
Expand All @@ -35,6 +34,8 @@
SchemaDatasetFacetFields,
)

from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces


class TestS3ToRedshiftTransfer:
@mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/transfers/test_s3_to_sftp.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import boto3
import pytest
from moto import mock_aws
from tests_common.test_utils.config import conf_vars

from airflow.models import DAG
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
Expand All @@ -29,6 +28,8 @@
from airflow.providers.ssh.operators.ssh import SSHOperator
from airflow.utils.timezone import datetime

from tests_common.test_utils.config import conf_vars

pytestmark = pytest.mark.db_test


Expand Down
3 changes: 2 additions & 1 deletion providers/tests/amazon/aws/transfers/test_sftp_to_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import boto3
import pytest
from moto import mock_aws
from tests_common.test_utils.config import conf_vars

from airflow.models import DAG
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
Expand All @@ -29,6 +28,8 @@
from airflow.providers.ssh.operators.ssh import SSHOperator
from airflow.utils.timezone import datetime

from tests_common.test_utils.config import conf_vars

pytestmark = pytest.mark.db_test

BUCKET = "test-bucket"
Expand Down
2 changes: 1 addition & 1 deletion providers/tests/apache/hive/hooks/test_hive.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import pandas as pd
import pytest
from hmsclient import HMSClient
from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces

from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
Expand All @@ -42,6 +41,7 @@
MockHiveServer2Hook,
MockSubProcess,
)
from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces

DEFAULT_DATE = timezone.datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/apache/livy/hooks/test_livy.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,14 @@
import requests
from aiohttp import ClientResponseError, RequestInfo
from requests.exceptions import RequestException
from tests_common.test_utils.db import clear_db_connections

from airflow.exceptions import AirflowException
from airflow.models import Connection
from airflow.providers.apache.livy.hooks.livy import BatchState, LivyAsyncHook, LivyHook
from airflow.utils import db

from tests_common.test_utils.db import clear_db_connections

pytestmark = pytest.mark.skip_if_database_isolation_mode

LIVY_CONN_ID = LivyHook.default_conn_name
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/apache/spark/hooks/test_spark_sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,14 @@
from unittest.mock import call, patch

import pytest
from tests_common.test_utils.db import clear_db_connections

from airflow.exceptions import AirflowException
from airflow.models import Connection
from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook
from airflow.utils import db

from tests_common.test_utils.db import clear_db_connections

pytestmark = pytest.mark.db_test


Expand Down
3 changes: 2 additions & 1 deletion providers/tests/atlassian/jira/hooks/test_jira.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,13 @@
from unittest import mock

import pytest
from tests_common.test_utils.compat import connection_as_json

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.models import Connection
from airflow.providers.atlassian.jira.hooks.jira import JiraHook

from tests_common.test_utils.compat import connection_as_json


@pytest.fixture
def mocked_jira_client():
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/atlassian/jira/operators/test_jira.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,13 @@
from unittest import mock

import pytest
from tests_common.test_utils.compat import connection_as_json

from airflow.models import Connection
from airflow.providers.atlassian.jira.operators.jira import JiraOperator
from airflow.utils import timezone

from tests_common.test_utils.compat import connection_as_json

DEFAULT_DATE = timezone.datetime(2017, 1, 1)
MINIMAL_TEST_TICKET = {
"id": "911539",
Expand Down
3 changes: 2 additions & 1 deletion providers/tests/atlassian/jira/sensors/test_jira.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,13 @@
from unittest import mock

import pytest
from tests_common.test_utils.compat import connection_as_json

from airflow.models import Connection
from airflow.providers.atlassian.jira.sensors.jira import JiraTicketSensor
from airflow.utils import timezone

from tests_common.test_utils.compat import connection_as_json

DEFAULT_DATE = timezone.datetime(2017, 1, 1)
MINIMAL_TEST_TICKET = {
"id": "911539",
Expand Down
Loading

0 comments on commit a993f93

Please sign in to comment.