From 28b53e20736993dd3f513e449d44ab38986832d9 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Thu, 24 Jan 2019 00:21:24 +0000 Subject: [PATCH 0001/1104] [AIRFLOW-XXX] Pin version of Pip in tests to work around pypa/pip#6163 (#4576) There is a bug or a new feature that causes a number of our dependencies to fail to install. --- .travis.yml | 2 +- Dockerfile | 2 +- scripts/ci/kubernetes/docker/Dockerfile | 2 +- tox.ini | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 74cce5f521735..6a7e308fa775c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -64,7 +64,7 @@ install: - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose - chmod +x docker-compose - sudo mv docker-compose /usr/local/bin - - pip install --upgrade pip + - pip install --upgrade script: - if [ -z "$KUBERNETES_VERSION" ]; then docker-compose --log-level ERROR -f scripts/ci/docker-compose.yml run airflow-testing /app/scripts/ci/run-ci.sh; fi - if [ ! 
-z "$KUBERNETES_VERSION" ]; then diff --git a/Dockerfile b/Dockerfile index 832cce270e6ee..d52e7d456c4c3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,7 +29,7 @@ RUN set -x \ && apt update \ && if [ -n "${APT_DEPS}" ]; then apt install -y $APT_DEPS; fi \ && if [ -n "${PYTHON_DEPS}" ]; then pip install --no-cache-dir ${PYTHON_DEPS}; fi \ - && pip install --no-cache-dir -e .[$AIRFLOW_DEPS] \ + && pip install --no-cache-dir --no-use-pep517 -e .[$AIRFLOW_DEPS] \ && apt purge --auto-remove -yqq $buildDeps \ && apt autoremove -yqq --purge \ && apt clean diff --git a/scripts/ci/kubernetes/docker/Dockerfile b/scripts/ci/kubernetes/docker/Dockerfile index 387e7d3804740..aca828746b27f 100644 --- a/scripts/ci/kubernetes/docker/Dockerfile +++ b/scripts/ci/kubernetes/docker/Dockerfile @@ -43,7 +43,7 @@ RUN pip install -U setuptools && \ # install airflow COPY airflow.tar.gz /tmp/airflow.tar.gz -RUN pip install /tmp/airflow.tar.gz +RUN pip install --no-use-pep517 /tmp/airflow.tar.gz COPY airflow-test-env-init.sh /tmp/airflow-test-env-init.sh diff --git a/tox.ini b/tox.ini index 2f4304a4fba9a..e83094dc178a5 100644 --- a/tox.ini +++ b/tox.ini @@ -54,8 +54,8 @@ setenv = passenv = * commands = - pip wheel --progress-bar off -w {homedir}/.wheelhouse -f {homedir}/.wheelhouse -e .[devel_ci] - pip install --progress-bar off --find-links={homedir}/.wheelhouse --no-index -e .[devel_ci] + pip wheel --no-use-pep517 --progress-bar off -w {homedir}/.wheelhouse -f {homedir}/.wheelhouse -e .[devel_ci] + pip install --no-use-pep517 --progress-bar off --find-links={homedir}/.wheelhouse --no-index -e .[devel_ci] env_docker: {toxinidir}/scripts/ci/1-setup-env.sh env_docker: {toxinidir}/scripts/ci/2-setup-kdc.sh backend_mysql: {toxinidir}/scripts/ci/3-setup-mysql.sh From ca846c13f3e0d6facad41e3be54594272e004920 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Thu, 24 Jan 2019 10:50:42 +0000 Subject: [PATCH 0002/1104] [AIRFLOW-XXX] Only upgrade pip on Travis, not everything This was a mistake 
I introduced in #4576 that I only noticed after that was merged --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 6a7e308fa775c..74cce5f521735 100644 --- a/.travis.yml +++ b/.travis.yml @@ -64,7 +64,7 @@ install: - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose - chmod +x docker-compose - sudo mv docker-compose /usr/local/bin - - pip install --upgrade + - pip install --upgrade pip script: - if [ -z "$KUBERNETES_VERSION" ]; then docker-compose --log-level ERROR -f scripts/ci/docker-compose.yml run airflow-testing /app/scripts/ci/run-ci.sh; fi - if [ ! -z "$KUBERNETES_VERSION" ]; then From c69619e9cf823cdc4abd3874a296ce22b16349d3 Mon Sep 17 00:00:00 2001 From: zhongjiajie Date: Thu, 24 Jan 2019 19:19:43 +0800 Subject: [PATCH 0003/1104] [AIRFLOW-XXX] Fix spark submit hook KeyError (#4578) Fix string format KeyError in spark_submit_hook.py --- airflow/contrib/hooks/spark_submit_hook.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/contrib/hooks/spark_submit_hook.py b/airflow/contrib/hooks/spark_submit_hook.py index db37e75a9498c..33fb55af06e34 100644 --- a/airflow/contrib/hooks/spark_submit_hook.py +++ b/airflow/contrib/hooks/spark_submit_hook.py @@ -143,7 +143,7 @@ def __init__(self, self._is_kubernetes = 'k8s' in self._connection['master'] if self._is_kubernetes and kube_client is None: raise RuntimeError( - "{master} specified by kubernetes dependencies are not installed!".format( + "{} specified by kubernetes dependencies are not installed!".format( self._connection['master'])) self._should_track_driver_status = self._resolve_should_track_driver_status() From 7f8099ef134d8c930fb1c9b48c9cd0e78d5367fb Mon Sep 17 00:00:00 2001 From: Benjamin Goldberg Date: Thu, 24 Jan 2019 15:15:34 -0600 Subject: [PATCH 0004/1104] [AIRFLOW-XXX] Adds SpotHero as a user of Airflow (#4581) --- README.md | 
1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index e782e6bce1035..92b50a68833b7 100644 --- a/README.md +++ b/README.md @@ -300,6 +300,7 @@ Currently **officially** using Airflow: 1. [SocialCops](https://www.socialcops.com/) [[@vinayak-mehta](https://github.com/vinayak-mehta) & [@sharky93](https://github.com/sharky93)] 1. [Société générale](https://www.societegenerale.fr/) [[@medmrgh](https://github.com/medmrgh) & [@s83](https://github.com/s83)] 1. [Spotahome](https://www.spotahome.com/) [[@spotahome](https://github.com/spotahome)] +1. [SpotHero](https://github.com/spothero) [[@benjigoldberg](https://github.com/benjigoldberg)] 1. [Spotify](https://github.com/spotify) [[@znichols](https://github.com/znichols)] 1. [Square](https://squareup.com/) 1. [Stackspace](https://beta.stackspace.io/) From 993779f93df172f5f774f1ea2634c55e9c1212c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Fri, 25 Jan 2019 10:59:40 +0100 Subject: [PATCH 0005/1104] [AIRFLOW-XXX] Reduction of the number of warnings in the documentation (#4585) --- .../contrib/hooks/aws_glue_catalog_hook.py | 2 +- .../hooks/azure_container_instance_hook.py | 2 +- .../hooks/azure_container_volume_hook.py | 2 +- airflow/contrib/hooks/cassandra_hook.py | 15 +- airflow/contrib/hooks/ftp_hook.py | 3 +- airflow/contrib/hooks/gcp_bigtable_hook.py | 5 +- airflow/contrib/hooks/gcp_compute_hook.py | 2 +- airflow/contrib/hooks/gcp_container_hook.py | 17 +- airflow/contrib/hooks/gcp_mlengine_hook.py | 2 + airflow/contrib/hooks/gcp_pubsub_hook.py | 5 +- airflow/contrib/hooks/gcp_spanner_hook.py | 7 +- airflow/contrib/hooks/gcs_hook.py | 1 + airflow/contrib/hooks/mongo_hook.py | 3 +- airflow/contrib/hooks/sftp_hook.py | 16 +- airflow/contrib/hooks/spark_jdbc_hook.py | 2 +- airflow/contrib/hooks/spark_sql_hook.py | 2 + airflow/contrib/hooks/wasb_hook.py | 2 +- airflow/contrib/operators/adls_to_gcs.py | 3 + .../azure_container_instances_operator.py | 4 +- 
.../contrib/operators/bigquery_to_bigquery.py | 6 +- .../contrib/operators/databricks_operator.py | 2 + airflow/contrib/operators/gcs_operator.py | 28 +-- airflow/contrib/operators/gcs_to_gcs.py | 75 ++++---- .../operators/jenkins_job_trigger_operator.py | 8 +- .../contrib/operators/mlengine_operator.py | 25 ++- .../operators/qubole_check_operator.py | 3 +- airflow/contrib/operators/sqoop_operator.py | 2 +- .../operators/wasb_delete_blob_operator.py | 2 +- airflow/contrib/sensors/python_sensor.py | 4 +- airflow/contrib/sensors/weekday_sensor.py | 2 + airflow/hooks/dbapi_hook.py | 2 +- airflow/hooks/http_hook.py | 9 +- airflow/hooks/mysql_hook.py | 2 +- airflow/hooks/oracle_hook.py | 8 +- airflow/models/__init__.py | 24 +-- .../operators/s3_file_transform_operator.py | 1 + airflow/task/task_runner/__init__.py | 2 +- airflow/task/task_runner/base_task_runner.py | 2 +- airflow/utils/dag_processing.py | 2 +- airflow/utils/helpers.py | 6 +- docs/code.rst | 59 +++++-- docs/conf.py | 11 +- docs/howto/operator.rst | 4 - docs/howto/secure-connections.rst | 14 +- docs/integration.rst | 162 ++++++++++++++++-- docs/kubernetes.rst | 4 + docs/plugins.rst | 1 - docs/profiling.rst | 1 + docs/security.rst | 6 +- 49 files changed, 395 insertions(+), 177 deletions(-) diff --git a/airflow/contrib/hooks/aws_glue_catalog_hook.py b/airflow/contrib/hooks/aws_glue_catalog_hook.py index 687f0fddb99c5..00501d3ddabc4 100644 --- a/airflow/contrib/hooks/aws_glue_catalog_hook.py +++ b/airflow/contrib/hooks/aws_glue_catalog_hook.py @@ -70,7 +70,7 @@ def get_partitions(self, :type max_items: int :return: set of partition values where each value is a tuple since a partition may be composed of multiple columns. 
For example: - {('2018-01-01','1'), ('2018-01-01','2')} + ``{('2018-01-01','1'), ('2018-01-01','2')}`` """ config = { 'PageSize': page_size, diff --git a/airflow/contrib/hooks/azure_container_instance_hook.py b/airflow/contrib/hooks/azure_container_instance_hook.py index cc8073fe58077..95386d55ffcbb 100644 --- a/airflow/contrib/hooks/azure_container_instance_hook.py +++ b/airflow/contrib/hooks/azure_container_instance_hook.py @@ -101,7 +101,7 @@ def get_state_exitcode_details(self, resource_group, name): :param name: the name of the container group :type name: str :return: A tuple with the state, exitcode, and details. - If the exitcode is unknown 0 is returned. + If the exitcode is unknown 0 is returned. :rtype: tuple(state,exitcode,details) """ current_state = self._get_instance_view(resource_group, name).current_state diff --git a/airflow/contrib/hooks/azure_container_volume_hook.py b/airflow/contrib/hooks/azure_container_volume_hook.py index 5bf349106433d..c81c5190776c6 100644 --- a/airflow/contrib/hooks/azure_container_volume_hook.py +++ b/airflow/contrib/hooks/azure_container_volume_hook.py @@ -27,7 +27,7 @@ class AzureContainerVolumeHook(BaseHook): A hook which wraps an Azure Volume. :param wasb_conn_id: connection id of a Azure storage account of - which file shares should be mounted + which file shares should be mounted :type wasb_conn_id: str """ diff --git a/airflow/contrib/hooks/cassandra_hook.py b/airflow/contrib/hooks/cassandra_hook.py index 2c744fe9b11a8..08def3a5b775c 100644 --- a/airflow/contrib/hooks/cassandra_hook.py +++ b/airflow/contrib/hooks/cassandra_hook.py @@ -36,14 +36,17 @@ class CassandraHook(BaseHook, LoggingMixin): Port can be specified in the port field of the connection. If SSL is enabled in Cassandra, pass in a dict in the extra field as kwargs for - ``ssl.wrap_socket()``. For example: - { - 'ssl_options' : { - 'ca_certs' : PATH_TO_CA_CERTS - } + ``ssl.wrap_socket()``. 
For example:: + + { + 'ssl_options' : { + 'ca_certs' : PATH_TO_CA_CERTS } + } + + Default load balancing policy is RoundRobinPolicy. To specify a different + LB policy:: - Default load balancing policy is RoundRobinPolicy. To specify a different LB policy: - DCAwareRoundRobinPolicy { 'load_balancing_policy': 'DCAwareRoundRobinPolicy', diff --git a/airflow/contrib/hooks/ftp_hook.py b/airflow/contrib/hooks/ftp_hook.py index 03849012a3d5b..d67ebf82bc4c6 100644 --- a/airflow/contrib/hooks/ftp_hook.py +++ b/airflow/contrib/hooks/ftp_hook.py @@ -173,7 +173,8 @@ def retrieve_file( [default: output_handle.write()] :type callback: callable - Example:: + :Example:: + hook = FTPHook(ftp_conn_id='my_conn') remote_path = '/path/to/remote/file' diff --git a/airflow/contrib/hooks/gcp_bigtable_hook.py b/airflow/contrib/hooks/gcp_bigtable_hook.py index a8b775c980f5c..aeca2b475f566 100644 --- a/airflow/contrib/hooks/gcp_bigtable_hook.py +++ b/airflow/contrib/hooks/gcp_bigtable_hook.py @@ -174,7 +174,7 @@ def create_table(instance, column_families=None): """ Creates the specified Cloud Bigtable table. - Raises google.api_core.exceptions.AlreadyExists if the table exists. + Raises ``google.api_core.exceptions.AlreadyExists`` if the table exists. :type instance: Instance :param instance: The Cloud Bigtable instance that owns the table. @@ -185,8 +185,7 @@ def create_table(instance, initially split the table. :type column_families: dict :param column_families: (Optional) A map of columns to create. The key is the - column_id str, and the - value is a GarbageCollectionRule. + column_id str, and the value is a ``GarbageCollectionRule``. 
""" if column_families is None: column_families = {} diff --git a/airflow/contrib/hooks/gcp_compute_hook.py b/airflow/contrib/hooks/gcp_compute_hook.py index 7dd17d18b867c..02e0f56bf7697 100644 --- a/airflow/contrib/hooks/gcp_compute_hook.py +++ b/airflow/contrib/hooks/gcp_compute_hook.py @@ -271,7 +271,7 @@ def patch_instance_group_manager(self, zone, resource_id, Compute Engine Instance exists. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str - :return None + :return: None """ response = self.get_conn().instanceGroupManagers().patch( project=project_id, diff --git a/airflow/contrib/hooks/gcp_container_hook.py b/airflow/contrib/hooks/gcp_container_hook.py index 4a610e56c9501..30096c14f4d7c 100644 --- a/airflow/contrib/hooks/gcp_container_hook.py +++ b/airflow/contrib/hooks/gcp_container_hook.py @@ -112,10 +112,11 @@ def _append_label(cluster_proto, key, val): """ Append labels to provided Cluster Protobuf - Labels must fit the regex [a-z]([-a-z0-9]*[a-z0-9])? (current airflow version - string follows semantic versioning spec: x.y.z). + Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current + airflow version string follows semantic versioning spec: x.y.z). - :param cluster_proto: The proto to append resource_label airflow version to + :param cluster_proto: The proto to append resource_label airflow + version to :type cluster_proto: google.cloud.container_v1.types.Cluster :param key: The key label :type key: str @@ -170,12 +171,14 @@ def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAUL Creates a cluster, consisting of the specified number and type of Google Compute Engine instances. - :param cluster: A Cluster protobuf or dict. If dict is provided, it must be of - the same form as the protobuf message google.cloud.container_v1.types.Cluster + :param cluster: A Cluster protobuf or dict. 
If dict is provided, it must + be of the same form as the protobuf message + ``google.cloud.container_v1.types.Cluster`` :type cluster: dict or google.cloud.container_v1.types.Cluster :param project_id: Google Cloud Platform project ID :type project_id: str - :param retry: A retry object (google.api_core.retry.Retry) used to retry requests. + :param retry: A retry object (``google.api_core.retry.Retry``) used to + retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to @@ -183,7 +186,7 @@ def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAUL individual attempt. :type timeout: float :return: The full url to the new, or existing, cluster - :raises + :raises: ParseError: On JSON parsing problems when trying to convert dict AirflowException: cluster is not dict type nor Cluster proto type """ diff --git a/airflow/contrib/hooks/gcp_mlengine_hook.py b/airflow/contrib/hooks/gcp_mlengine_hook.py index 026bfe4c35f50..715e82d47d5de 100644 --- a/airflow/contrib/hooks/gcp_mlengine_hook.py +++ b/airflow/contrib/hooks/gcp_mlengine_hook.py @@ -67,6 +67,7 @@ def create_job(self, project_id, job, use_existing_job_fn=None): :param job: MLEngine Job object that should be provided to the MLEngine API, such as: :: + { 'jobId': 'my_job_id', 'trainingInput': { @@ -74,6 +75,7 @@ def create_job(self, project_id, job, use_existing_job_fn=None): ... } } + :type job: dict :param use_existing_job_fn: In case that a MLEngine job with the same diff --git a/airflow/contrib/hooks/gcp_pubsub_hook.py b/airflow/contrib/hooks/gcp_pubsub_hook.py index 33d00bdf543a1..50512d2127463 100644 --- a/airflow/contrib/hooks/gcp_pubsub_hook.py +++ b/airflow/contrib/hooks/gcp_pubsub_hook.py @@ -239,11 +239,10 @@ def pull(self, project, subscription, max_messages, return if no messages are available. 
Otherwise, the request will block for an undisclosed, but bounded period of time :type return_immediately: bool - :return A list of Pub/Sub ReceivedMessage objects each containing + :return: A list of Pub/Sub ReceivedMessage objects each containing an ``ackId`` property and a ``message`` property, which includes the base64-encoded message content. See - https://cloud.google.com/pubsub/docs/reference/rest/v1/\ - projects.subscriptions/pull#ReceivedMessage + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#ReceivedMessage """ service = self.get_conn() full_subscription = _format_subscription(project, subscription) diff --git a/airflow/contrib/hooks/gcp_spanner_hook.py b/airflow/contrib/hooks/gcp_spanner_hook.py index 43779b1bb93d7..66e9fc52dbad0 100644 --- a/airflow/contrib/hooks/gcp_spanner_hook.py +++ b/airflow/contrib/hooks/gcp_spanner_hook.py @@ -166,13 +166,14 @@ def delete_instance(self, instance_id, project_id=None): """ Deletes an existing Cloud Spanner instance. - :param instance_id: The ID of the Cloud Spanner instance. + :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str - :param project_id: Optional, the ID of the GCP project that owns the Cloud Spanner + :param project_id: Optional, the ID of the GCP project that owns the Cloud Spanner database. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str - :return None + :return: None """ + instance = self._get_client(project_id=project_id).instance(instance_id) try: instance.delete() diff --git a/airflow/contrib/hooks/gcs_hook.py b/airflow/contrib/hooks/gcs_hook.py index 17495e9bbaff7..0b6e76803571f 100644 --- a/airflow/contrib/hooks/gcs_hook.py +++ b/airflow/contrib/hooks/gcs_hook.py @@ -501,6 +501,7 @@ def create_bucket(self, - ``STANDARD`` - ``NEARLINE`` - ``COLDLINE``. + If this value is not specified when the bucket is created, it will default to STANDARD. 
:type storage_class: str diff --git a/airflow/contrib/hooks/mongo_hook.py b/airflow/contrib/hooks/mongo_hook.py index c1debc305764e..c90326f760c93 100644 --- a/airflow/contrib/hooks/mongo_hook.py +++ b/airflow/contrib/hooks/mongo_hook.py @@ -29,8 +29,7 @@ class MongoHook(BaseHook): https://docs.mongodb.com/manual/reference/connection-string/index.html You can specify connection string options in extra field of your connection https://docs.mongodb.com/manual/reference/connection-string/index.html#connection-string-options - ex. - {replicaSet: test, ssl: True, connectTimeoutMS: 30000} + ex. ``{replicaSet: test, ssl: True, connectTimeoutMS: 30000}`` """ conn_type = 'mongo' diff --git a/airflow/contrib/hooks/sftp_hook.py b/airflow/contrib/hooks/sftp_hook.py index c23bd3012ba32..a6a59920f2b0b 100644 --- a/airflow/contrib/hooks/sftp_hook.py +++ b/airflow/contrib/hooks/sftp_hook.py @@ -31,13 +31,15 @@ class SFTPHook(SSHHook): Interact with SFTP. Aims to be interchangeable with FTPHook. - Pitfalls: - In contrast with FTPHook describe_directory only returns size, type and - modify. It doesn't return unix.owner, unix.mode, perm, unix.group and - unique. - - retrieve_file and store_file only take a local full path and not a - buffer. - - If no mode is passed to create_directory it will be created with 777 - permissions. + :Pitfalls:: + + - In contrast with FTPHook describe_directory only returns size, type and + modify. It doesn't return unix.owner, unix.mode, perm, unix.group and + unique. + - retrieve_file and store_file only take a local full path and not a + buffer. + - If no mode is passed to create_directory it will be created with 777 + permissions. Errors that may occur throughout but should be handled downstream. 
""" diff --git a/airflow/contrib/hooks/spark_jdbc_hook.py b/airflow/contrib/hooks/spark_jdbc_hook.py index b55e4ef060419..c188b1e863de2 100644 --- a/airflow/contrib/hooks/spark_jdbc_hook.py +++ b/airflow/contrib/hooks/spark_jdbc_hook.py @@ -62,7 +62,7 @@ class SparkJDBCHook(SparkSubmitHook): :param jdbc_table: The name of the JDBC table :type jdbc_table: str :param jdbc_conn_id: Connection id used for connection to JDBC database - :type: jdbc_conn_id: str + :type jdbc_conn_id: str :param jdbc_driver: Name of the JDBC driver to use for the JDBC connection. This driver (usually a jar) should be passed in the 'jars' parameter :type jdbc_driver: str diff --git a/airflow/contrib/hooks/spark_sql_hook.py b/airflow/contrib/hooks/spark_sql_hook.py index c1fd2ce21c89f..25c0bc53c5576 100644 --- a/airflow/contrib/hooks/spark_sql_hook.py +++ b/airflow/contrib/hooks/spark_sql_hook.py @@ -27,6 +27,7 @@ class SparkSqlHook(BaseHook): """ This hook is a wrapper around the spark-sql binary. It requires that the "spark-sql" binary is in the PATH. + :param sql: The SQL query to execute :type sql: str :param conf: arbitrary Spark configuration property @@ -91,6 +92,7 @@ def _prepare_command(self, cmd): """ Construct the spark-sql command to execute. Verbose output is enabled as default. + :param cmd: command to append to the spark-sql command :type cmd: str :return: full command to be executed diff --git a/airflow/contrib/hooks/wasb_hook.py b/airflow/contrib/hooks/wasb_hook.py index 8e9fb7ed9b766..d3a766cf69de7 100644 --- a/airflow/contrib/hooks/wasb_hook.py +++ b/airflow/contrib/hooks/wasb_hook.py @@ -162,7 +162,7 @@ def delete_file(self, container_name, blob_name, is_prefix=False, :param is_prefix: If blob_name is a prefix, delete all matching files :type is_prefix: bool :param ignore_if_missing: if True, then return success even if the - blob does not exist. + blob does not exist. 
:type ignore_if_missing: bool :param kwargs: Optional keyword arguments that `BlockBlobService.create_blob_from_path()` takes. diff --git a/airflow/contrib/operators/adls_to_gcs.py b/airflow/contrib/operators/adls_to_gcs.py index affbd456260ff..5d99251d3f6d2 100644 --- a/airflow/contrib/operators/adls_to_gcs.py +++ b/airflow/contrib/operators/adls_to_gcs.py @@ -52,6 +52,7 @@ class AdlsToGoogleCloudStorageOperator(AzureDataLakeStorageListOperator): The following Operator would copy a single file named ``hello/world.avro`` from ADLS to the GCS bucket ``mybucket``. Its full resulting gcs path will be ``gs://mybucket/hello/world.avro`` :: + copy_single_file = AdlsToGoogleCloudStorageOperator( task_id='copy_single_file', src_adls='hello/world.avro', @@ -63,6 +64,7 @@ class AdlsToGoogleCloudStorageOperator(AzureDataLakeStorageListOperator): The following Operator would copy all parquet files from ADLS to the GCS bucket ``mybucket``. :: + copy_all_files = AdlsToGoogleCloudStorageOperator( task_id='copy_all_files', src_adls='*.parquet', @@ -74,6 +76,7 @@ class AdlsToGoogleCloudStorageOperator(AzureDataLakeStorageListOperator): The following Operator would copy all parquet files from ADLS path ``/hello/world``to the GCS bucket ``mybucket``. 
:: + copy_world_files = AdlsToGoogleCloudStorageOperator( task_id='copy_world_files', src_adls='hello/world/*.parquet', diff --git a/airflow/contrib/operators/azure_container_instances_operator.py b/airflow/contrib/operators/azure_container_instances_operator.py index f1b2a2a1a5bab..896891b4d8400 100644 --- a/airflow/contrib/operators/azure_container_instances_operator.py +++ b/airflow/contrib/operators/azure_container_instances_operator.py @@ -62,8 +62,8 @@ class AzureContainerInstancesOperator(BaseOperator): :type image: str :param region: the region wherein this container instance should be started :type region: str - :param: environment_variables: key,value pairs containing environment variables - which will be passed to the running container + :param: environment_variables: key,value pairs containing environment + variables which will be passed to the running container :type: environment_variables: dict :param: volumes: list of volumes to be mounted to the container. Currently only Azure Fileshares are supported. diff --git a/airflow/contrib/operators/bigquery_to_bigquery.py b/airflow/contrib/operators/bigquery_to_bigquery.py index 2073cadef126c..288731e157de7 100644 --- a/airflow/contrib/operators/bigquery_to_bigquery.py +++ b/airflow/contrib/operators/bigquery_to_bigquery.py @@ -31,13 +31,13 @@ class BigQueryToBigQueryOperator(BaseOperator): https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy :param source_project_dataset_tables: One or more - dotted (project:|project.). BigQuery tables to use as the - source data. If is not included, project will be the + dotted ``(project:|project.).
`` BigQuery tables to use as the + source data. If ```` is not included, project will be the project defined in the connection json. Use a list if there are multiple source tables. (templated) :type source_project_dataset_tables: list|string :param destination_project_dataset_table: The destination BigQuery - table. Format is: (project:|project.).
(templated) + table. Format is: ``(project:|project.).
`` (templated) :type destination_project_dataset_table: str :param write_disposition: The write disposition if the table already exists. :type write_disposition: str diff --git a/airflow/contrib/operators/databricks_operator.py b/airflow/contrib/operators/databricks_operator.py index c3c5d6e65b4c7..5fbc605abea60 100644 --- a/airflow/contrib/operators/databricks_operator.py +++ b/airflow/contrib/operators/databricks_operator.py @@ -104,6 +104,7 @@ class DatabricksSubmitRunOperator(BaseOperator): to call the ``api/2.0/jobs/runs/submit`` endpoint and pass it directly to our ``DatabricksSubmitRunOperator`` through the ``json`` parameter. For example :: + json = { 'new_cluster': { 'spark_version': '2.1.0-db3-scala2.11', @@ -119,6 +120,7 @@ class DatabricksSubmitRunOperator(BaseOperator): of the ``DatabricksSubmitRunOperator`` directly. Note that there is exactly one named parameter for each top level parameter in the ``runs/submit`` endpoint. In this method, your code would look like this: :: + new_cluster = { 'spark_version': '2.1.0-db3-scala2.11', 'num_workers': 2 diff --git a/airflow/contrib/operators/gcs_operator.py b/airflow/contrib/operators/gcs_operator.py index c685722185fa3..db821717cdb51 100644 --- a/airflow/contrib/operators/gcs_operator.py +++ b/airflow/contrib/operators/gcs_operator.py @@ -42,6 +42,7 @@ class GoogleCloudStorageCreateBucketOperator(BaseOperator): - ``STANDARD`` - ``NEARLINE`` - ``COLDLINE``. + If this value is not specified when the bucket is created, it will default to STANDARD. :type storage_class: str @@ -49,8 +50,7 @@ class GoogleCloudStorageCreateBucketOperator(BaseOperator): Object data for objects in the bucket resides in physical storage within this region. Defaults to US. - .. seealso:: - https://developers.google.com/storage/docs/bucket-locations + .. seealso:: https://developers.google.com/storage/docs/bucket-locations :type location: str :param project_id: The ID of the GCP Project. 
(templated) @@ -65,18 +65,18 @@ class GoogleCloudStorageCreateBucketOperator(BaseOperator): have domain-wide delegation enabled. :type delegate_to: str - **Example**: - The following Operator would create a new bucket ``test-bucket`` - with ``MULTI_REGIONAL`` storage class in ``EU`` region :: - - CreateBucket = GoogleCloudStorageCreateBucketOperator( - task_id='CreateNewBucket', - bucket_name='test-bucket', - storage_class='MULTI_REGIONAL', - location='EU', - labels={'env': 'dev', 'team': 'airflow'}, - google_cloud_storage_conn_id='airflow-service-account' - ) + :Example:: + The following Operator would create a new bucket ``test-bucket`` + with ``MULTI_REGIONAL`` storage class in ``EU`` region :: + + CreateBucket = GoogleCloudStorageCreateBucketOperator( + task_id='CreateNewBucket', + bucket_name='test-bucket', + storage_class='MULTI_REGIONAL', + location='EU', + labels={'env': 'dev', 'team': 'airflow'}, + google_cloud_storage_conn_id='airflow-service-account' + ) """ template_fields = ('bucket_name', 'storage_class', diff --git a/airflow/contrib/operators/gcs_to_gcs.py b/airflow/contrib/operators/gcs_to_gcs.py index 525cb54d4a546..3a7c83d5a52ae 100644 --- a/airflow/contrib/operators/gcs_to_gcs.py +++ b/airflow/contrib/operators/gcs_to_gcs.py @@ -66,43 +66,48 @@ class GoogleCloudStorageToGoogleCloudStorageOperator(BaseOperator): If tzinfo has not been set, UTC will be assumed. 
:type last_modified_time: datetime - **Examples**: - The following Operator would copy a single file named - ``sales/sales-2017/january.avro`` in the ``data`` bucket to the file named - ``copied_sales/2017/january-backup.avro`` in the ``data_backup`` bucket :: - copy_single_file = GoogleCloudStorageToGoogleCloudStorageOperator( - task_id='copy_single_file', - source_bucket='data', - source_object='sales/sales-2017/january.avro', - destination_bucket='data_backup', - destination_object='copied_sales/2017/january-backup.avro', - google_cloud_storage_conn_id=google_cloud_conn_id - ) + :Example: + + The following Operator would copy a single file named + ``sales/sales-2017/january.avro`` in the ``data`` bucket to the file named + ``copied_sales/2017/january-backup.avro`` in the ``data_backup`` bucket :: + + copy_single_file = GoogleCloudStorageToGoogleCloudStorageOperator( + task_id='copy_single_file', + source_bucket='data', + source_object='sales/sales-2017/january.avro', + destination_bucket='data_backup', + destination_object='copied_sales/2017/january-backup.avro', + google_cloud_storage_conn_id=google_cloud_conn_id + ) - The following Operator would copy all the Avro files from ``sales/sales-2017`` - folder (i.e. with names starting with that prefix) in ``data`` bucket to the - ``copied_sales/2017`` folder in the ``data_backup`` bucket. :: - copy_files = GoogleCloudStorageToGoogleCloudStorageOperator( - task_id='copy_files', - source_bucket='data', - source_object='sales/sales-2017/*.avro', - destination_bucket='data_backup', - destination_object='copied_sales/2017/', - google_cloud_storage_conn_id=google_cloud_conn_id - ) + The following Operator would copy all the Avro files from ``sales/sales-2017`` + folder (i.e. with names starting with that prefix) in ``data`` bucket to the + ``copied_sales/2017`` folder in the ``data_backup`` bucket. 
:: + + copy_files = GoogleCloudStorageToGoogleCloudStorageOperator( + task_id='copy_files', + source_bucket='data', + source_object='sales/sales-2017/*.avro', + destination_bucket='data_backup', + destination_object='copied_sales/2017/', + google_cloud_storage_conn_id=google_cloud_conn_id + ) + + The following Operator would move all the Avro files from ``sales/sales-2017`` + folder (i.e. with names starting with that prefix) in ``data`` bucket to the + same folder in the ``data_backup`` bucket, deleting the original files in the + process. :: + + move_files = GoogleCloudStorageToGoogleCloudStorageOperator( + task_id='move_files', + source_bucket='data', + source_object='sales/sales-2017/*.avro', + destination_bucket='data_backup', + move_object=True, + google_cloud_storage_conn_id=google_cloud_conn_id + ) - The following Operator would move all the Avro files from ``sales/sales-2017`` - folder (i.e. with names starting with that prefix) in ``data`` bucket to the - same folder in the ``data_backup`` bucket, deleting the original files in the - process. 
:: - move_files = GoogleCloudStorageToGoogleCloudStorageOperator( - task_id='move_files', - source_bucket='data', - source_object='sales/sales-2017/*.avro', - destination_bucket='data_backup', - move_object=True, - google_cloud_storage_conn_id=google_cloud_conn_id - ) """ template_fields = ('source_bucket', 'source_object', 'destination_bucket', 'destination_object',) diff --git a/airflow/contrib/operators/jenkins_job_trigger_operator.py b/airflow/contrib/operators/jenkins_job_trigger_operator.py index 3e1aba0c1bb26..1701b199308f1 100644 --- a/airflow/contrib/operators/jenkins_job_trigger_operator.py +++ b/airflow/contrib/operators/jenkins_job_trigger_operator.py @@ -43,6 +43,7 @@ def jenkins_request_with_headers(jenkins_server, req, add_crumb=True): to get the location from them This function is just a copy of the one present in python-jenkins library with just the return call changed + :param jenkins_server: The server to query :param req: The request to execute :param add_crumb: Boolean to indicate if it should add crumb to the request @@ -94,6 +95,7 @@ class JenkinsJobTriggerOperator(BaseOperator): This operator depend on python-jenkins library, version >= 0.4.15 to communicate with jenkins server. You'll also need to configure a Jenkins connection in the connections screen. + :param jenkins_connection_id: The jenkins connection to use for this job :type jenkins_connection_id: str :param job_name: The name of the job to trigger @@ -101,7 +103,7 @@ class JenkinsJobTriggerOperator(BaseOperator): :param parameters: The parameters block to provide to jenkins. 
(templated) :type parameters: str :param sleep_time: How long will the operator sleep between each status - request for the job (min 1, default 10) + request for the job (min 1, default 10) :type sleep_time: int :param max_try_before_job_appears: The maximum number of requests to make while waiting for the job to appears on jenkins server (default 10) @@ -135,9 +137,10 @@ def build_job(self, jenkins_server): It returned a dict with 2 keys : body and headers. headers contains also a dict-like object which can be queried to get the location to poll in the queue. + :param jenkins_server: The jenkins server where the job should be triggered :return: Dict containing the response body (key body) - and the headers coming along (headers) + and the headers coming along (headers) """ # Warning if the parameter is too long, the URL can be longer than # the maximum allowed size @@ -163,6 +166,7 @@ def poll_job_in_queue(self, location, jenkins_server): returned by the build_job call and poll this file. When a 'executable' block appears in the json, it means the job execution started and the field 'number' then contains the build number. + :param location: Location to poll, returned in the header of the build_job call :param jenkins_server: The jenkins server to poll :return: The build_number corresponding to the triggered job diff --git a/airflow/contrib/operators/mlengine_operator.py b/airflow/contrib/operators/mlengine_operator.py index 5029e5e3f3084..65015acb2150b 100644 --- a/airflow/contrib/operators/mlengine_operator.py +++ b/airflow/contrib/operators/mlengine_operator.py @@ -68,17 +68,16 @@ class MLEngineBatchPredictionOperator(BaseOperator): NOTE: For model origin, users should consider exactly one from the three options below: - 1. Populate 'uri' field only, which should be a GCS location that - points to a tensorflow savedModel directory. - 2. Populate 'model_name' field only, which refers to an existing - model, and the default version of the model will be used. - 3. 
Populate both 'model_name' and 'version_name' fields, which - refers to a specific version of a specific model. - In options 2 and 3, both model and version name should contain the - minimal identifier. For instance, call + 1. Populate ``uri`` field only, which should be a GCS location that + points to a tensorflow savedModel directory. + 2. Populate ``model_name`` field only, which refers to an existing + model, and the default version of the model will be used. + 3. Populate both ``model_name`` and ``version_name`` fields, which + refers to a specific version of a specific model. - :: + In options 2 and 3, both model and version name should contain the + minimal identifier. For instance, call:: MLEngineBatchPredictionOperator( ..., @@ -87,7 +86,7 @@ class MLEngineBatchPredictionOperator(BaseOperator): ...) if the desired model version is - "projects/my_project/models/my_model/versions/my_version". + ``projects/my_project/models/my_model/versions/my_version``. See https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs for further documentation on the parameters. @@ -106,7 +105,7 @@ class MLEngineBatchPredictionOperator(BaseOperator): :type data_format: str :param input_paths: A list of GCS paths of input data for batch - prediction. Accepting wildcard operator *, but only at the end. (templated) + prediction. Accepting wildcard operator ``*``, but only at the end. (templated) :type input_paths: list of string :param output_path: The GCS path where the prediction results are @@ -151,8 +150,8 @@ class MLEngineBatchPredictionOperator(BaseOperator): have domain-wide delegation enabled. :type delegate_to: str - Raises: - ``ValueError``: if a unique model/version origin cannot be determined. + :raises: ``ValueError``: if a unique model/version origin cannot be + determined. 
""" template_fields = [ diff --git a/airflow/contrib/operators/qubole_check_operator.py b/airflow/contrib/operators/qubole_check_operator.py index 8b6b5d351cd86..142b07e79eb04 100644 --- a/airflow/contrib/operators/qubole_check_operator.py +++ b/airflow/contrib/operators/qubole_check_operator.py @@ -69,7 +69,8 @@ class QuboleCheckOperator(CheckOperator, QuboleOperator): which the checks have to be performed. .. note:: All fields in common with template fields of - QuboleOperator and CheckOperator are template-supported. + QuboleOperator and CheckOperator are template-supported. + """ template_fields = QuboleOperator.template_fields + CheckOperator.template_fields diff --git a/airflow/contrib/operators/sqoop_operator.py b/airflow/contrib/operators/sqoop_operator.py index fa61ca14cac45..c7ebcf4b3a2d4 100644 --- a/airflow/contrib/operators/sqoop_operator.py +++ b/airflow/contrib/operators/sqoop_operator.py @@ -34,7 +34,7 @@ class SqoopOperator(BaseOperator): """ Execute a Sqoop job. Documentation for Apache Sqoop can be found here: - https://sqoop.apache.org/docs/1.4.2/SqoopUserGuide.html. + https://sqoop.apache.org/docs/1.4.2/SqoopUserGuide.html """ template_fields = ('conn_id', 'cmd_type', 'table', 'query', 'target_dir', 'file_type', 'columns', 'split_by', diff --git a/airflow/contrib/operators/wasb_delete_blob_operator.py b/airflow/contrib/operators/wasb_delete_blob_operator.py index 4634741d8b824..4173d7e7a8a5d 100644 --- a/airflow/contrib/operators/wasb_delete_blob_operator.py +++ b/airflow/contrib/operators/wasb_delete_blob_operator.py @@ -37,7 +37,7 @@ class WasbDeleteBlobOperator(BaseOperator): :param is_prefix: If blob_name is a prefix, delete all files matching prefix. :type is_prefix: bool :param ignore_if_missing: if True, then return success even if the - blob does not exist. + blob does not exist. 
:type ignore_if_missing: bool """ diff --git a/airflow/contrib/sensors/python_sensor.py b/airflow/contrib/sensors/python_sensor.py index 68bc7497eaf41..ecdc7e6c8ea40 100644 --- a/airflow/contrib/sensors/python_sensor.py +++ b/airflow/contrib/sensors/python_sensor.py @@ -26,8 +26,8 @@ class PythonSensor(BaseSensorOperator): Waits for a Python callable to return True. User could put input argument in templates_dict - e.g templates_dict = {'start_ds': 1970} - and access the argument by calling `kwargs['templates_dict']['start_ds']` + e.g ``templates_dict = {'start_ds': 1970}`` + and access the argument by calling ``kwargs['templates_dict']['start_ds']`` in the the callable :param python_callable: A reference to an object that is callable diff --git a/airflow/contrib/sensors/weekday_sensor.py b/airflow/contrib/sensors/weekday_sensor.py index 1a7a702aef97c..e0357c5b2f496 100644 --- a/airflow/contrib/sensors/weekday_sensor.py +++ b/airflow/contrib/sensors/weekday_sensor.py @@ -60,10 +60,12 @@ class DayOfWeekSensor(BaseSensorOperator): :param week_day: Day of the week to check (full name). Optionally, a set of days can also be provided using a set. Example values: + * ``"MONDAY"``, * ``{"Saturday", "Sunday"}`` * ``{WeekDay.TUESDAY}`` * ``{WeekDay.SATURDAY, WeekDay.SUNDAY}`` + :type week_day: set or str or WeekDay :param use_task_execution_day: If ``True``, uses task's execution day to compare with week_day. Execution Date is Useful for backfilling. diff --git a/airflow/hooks/dbapi_hook.py b/airflow/hooks/dbapi_hook.py index e69631958e3fa..4fce22786997e 100644 --- a/airflow/hooks/dbapi_hook.py +++ b/airflow/hooks/dbapi_hook.py @@ -196,7 +196,7 @@ def get_autocommit(self, conn): :param conn: Connection to get autocommit setting from. :type conn: connection object. :return: connection autocommit setting. - :rtype bool. 
+ :rtype: bool """ return getattr(conn, 'autocommit', False) and self.supports_autocommit diff --git a/airflow/hooks/http_hook.py b/airflow/hooks/http_hook.py index 1e0c7b3058147..200a6741f2cae 100644 --- a/airflow/hooks/http_hook.py +++ b/airflow/hooks/http_hook.py @@ -29,6 +29,7 @@ class HttpHook(BaseHook): """ Interact with HTTP servers. + :param http_conn_id: connection that has the base API url i.e https://www.google.com/ and optional authentication credentials. Default headers can also be specified in the Extra field in json format. @@ -52,6 +53,7 @@ def __init__( def get_conn(self, headers=None): """ Returns http session for use with requests + :param headers: additional headers to be passed through as a dictionary :type headers: dict """ @@ -83,6 +85,7 @@ def get_conn(self, headers=None): def run(self, endpoint, data=None, headers=None, extra_options=None): """ Performs the request + :param endpoint: the endpoint to be called i.e. resource/v1/query? :type endpoint: str :param data: payload to be uploaded or request parameters @@ -130,6 +133,7 @@ def check_response(self, response): """ Checks the status code and raise an AirflowException exception on non 2XX or 3XX status codes + :param response: A requests response object :type response: requests.response """ @@ -145,6 +149,7 @@ def run_and_check(self, session, prepped_request, extra_options): """ Grabs extra options like timeout and actually runs the request, checking for the result + :param session: the session to be used to execute the request :type session: requests.Session :param prepped_request: the prepared request generated in run() @@ -179,12 +184,14 @@ def run_with_advanced_retry(self, _retry_args, *args, **kwargs): Runs Hook.run() with a Tenacity decorator attached to it. This is useful for connectors which might be disturbed by intermittent issues and should not instantly fail. + :param _retry_args: Arguments which define the retry behaviour. 
See Tenacity documentation at https://github.com/jd/tenacity :type _retry_args: dict - Example: :: + :Example:: + hook = HttpHook(http_conn_id='my_conn',method='GET') retry_args = dict( wait=tenacity.wait_exponential(), diff --git a/airflow/hooks/mysql_hook.py b/airflow/hooks/mysql_hook.py index ce35bf427e898..3e839527ee9a5 100644 --- a/airflow/hooks/mysql_hook.py +++ b/airflow/hooks/mysql_hook.py @@ -55,7 +55,7 @@ def get_autocommit(self, conn): :param conn: connection to get autocommit setting from. :type conn: connection object. :return: connection autocommit setting - :rtype bool + :rtype: bool """ return conn.get_autocommit() diff --git a/airflow/hooks/oracle_hook.py b/airflow/hooks/oracle_hook.py index 39e447d010793..ae6aa0f039cf6 100644 --- a/airflow/hooks/oracle_hook.py +++ b/airflow/hooks/oracle_hook.py @@ -46,6 +46,7 @@ def get_conn(self): :param dsn: the host address for the Oracle server :param service_name: the db_unique_name of the database that you are connecting to (CONNECT_DATA part of TNS) + You can set these parameters in the extra fields of your connection as in ``{ "dsn":"some.host.address" , "service_name":"some.service.name" }`` """ @@ -74,9 +75,10 @@ def insert_rows(self, table, rows, target_fields=None, commit_every=1000): A generic way to insert a set of tuples into a table, the whole set of inserts is treated as one transaction Changes from standard DbApiHook implementation: - - Oracle SQL queries in cx_Oracle can not be terminated with a semicolon (';') - - Replace NaN values with NULL using numpy.nan_to_num (not using is_nan() - because of input types error for strings) + + - Oracle SQL queries in cx_Oracle can not be terminated with a semicolon (`;`) + - Replace NaN values with NULL using `numpy.nan_to_num` (not using + `is_nan()` because of input types error for strings) - Coerce datetime cells to Oracle DATETIME format during insert """ if target_fields: diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 
921095033feb0..35be84ce200d7 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -475,9 +475,9 @@ def kill_zombies(self, zombies, session=None): had a heartbeat for too long, in the current DagBag. :param zombies: zombie task instances to kill. - :type zombies: SimpleTaskInstance + :type zombies: ``SimpleTaskInstance`` :param session: DB session. - :type Session. + :type session: Session """ for zombie in zombies: if zombie.dag_id in self.dags: @@ -3540,8 +3540,10 @@ def handle_callback(self, dagrun, success=True, reason=None, session=None): on_failure_callback or on_success_callback. This method gets the context of a single TaskInstance part of this DagRun and passes that to the callable along with a 'reason', primarily to differentiate DagRun failures. - .. note:: - The logs end up in $AIRFLOW_HOME/logs/scheduler/latest/PROJECT/DAG_FILE.py.log + + .. note: The logs end up in + ``$AIRFLOW_HOME/logs/scheduler/latest/PROJECT/DAG_FILE.py.log`` + :param dagrun: DagRun object :param success: Flag to specify if failure or success callback should be called :param reason: Completion reason @@ -4427,10 +4429,10 @@ def setdefault(cls, key, default, deserialize_json=False): :param key: Dict key for this Variable :type key: String :param default: Default value to set and return if the variable - isn't already in the DB + isn't already in the DB :type default: Mixed :param deserialize_json: Store this as a JSON encoded value in the DB - and un-encode it when retrieving a value + and un-encode it when retrieving a value :return: Mixed """ default_sentinel = object() @@ -4538,6 +4540,7 @@ def set( Store an XCom value. TODO: "pickling" has been deprecated and JSON is preferred. "pickling" will be removed in Airflow 2.0. + :return: None """ session.expunge_all() @@ -4587,7 +4590,8 @@ def get_one(cls, """ Retrieve an XCom value, optionally meeting certain criteria. TODO: "pickling" has been deprecated and JSON is preferred. 
- "pickling" will be removed in Airflow 2.0. + "pickling" will be removed in Airflow 2.0. + :return: XCom value """ filters = [] @@ -4635,7 +4639,7 @@ def get_many(cls, """ Retrieve an XCom value, optionally meeting certain criteria TODO: "pickling" has been deprecated and JSON is preferred. - "pickling" will be removed in Airflow 2.0. + "pickling" will be removed in Airflow 2.0. """ filters = [] if key: @@ -4763,7 +4767,7 @@ def find(dag_id=None, run_id=None, execution_date=None, :param external_trigger: whether this dag run is externally triggered :type external_trigger: bool :param no_backfills: return no backfills (True), return all (False). - Defaults to False + Defaults to False :type no_backfills: bool :param session: database session :type session: Session @@ -5020,7 +5024,7 @@ def get_run(session, dag_id, execution_date): :param execution_date: execution date :type execution_date: datetime :return: DagRun corresponding to the given dag_id and execution date - if one exists. None otherwise. + if one exists. None otherwise. :rtype: DagRun """ qry = session.query(DagRun).filter( diff --git a/airflow/operators/s3_file_transform_operator.py b/airflow/operators/s3_file_transform_operator.py index c121b3830043c..74c351db86356 100644 --- a/airflow/operators/s3_file_transform_operator.py +++ b/airflow/operators/s3_file_transform_operator.py @@ -58,6 +58,7 @@ class S3FileTransformOperator(BaseOperator): - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to uses. You can specify this argument if you want to use a different CA cert bundle than the one used by botocore. + This is also applicable to ``dest_verify``. :type source_verify: bool or str :param dest_s3_key: The key to be written from S3. 
(templated) diff --git a/airflow/task/task_runner/__init__.py b/airflow/task/task_runner/__init__.py index 5a30cf5dc4cf0..960a9e9e8e426 100644 --- a/airflow/task/task_runner/__init__.py +++ b/airflow/task/task_runner/__init__.py @@ -29,7 +29,7 @@ def get_task_runner(local_task_job): Get the task runner that can be used to run the given job. :param local_task_job: The LocalTaskJob associated with the TaskInstance - that needs to be executed. + that needs to be executed. :type local_task_job: airflow.jobs.LocalTaskJob :return: The task runner to use to run the task. :rtype: airflow.task.task_runner.base_task_runner.BaseTaskRunner diff --git a/airflow/task/task_runner/base_task_runner.py b/airflow/task/task_runner/base_task_runner.py index 2a346de939e73..d2bf172bd435c 100644 --- a/airflow/task/task_runner/base_task_runner.py +++ b/airflow/task/task_runner/base_task_runner.py @@ -147,7 +147,7 @@ def return_code(self): """ :return: The return code associated with running the task instance or None if the task is not yet done. 
- :rtype int: + :rtype: int """ raise NotImplementedError() diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index 63a21277feda9..27546591277dd 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -284,7 +284,7 @@ def list_py_file_paths(directory, safe_mode=True, :param directory: the directory to traverse :type directory: unicode :param safe_mode: whether to use a heuristic to determine whether a file - contains Airflow DAG definitions + contains Airflow DAG definitions :return: a list of paths to Python files in the specified directory :rtype: list[unicode] """ diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py index c35d6aacc9dd0..a311a62418622 100644 --- a/airflow/utils/helpers.py +++ b/airflow/utils/helpers.py @@ -301,11 +301,13 @@ def parse_template_string(template_string): def render_log_filename(ti, try_number, filename_template): """ - Given task instance, try_number, filename_template, return the rendered log filename + Given task instance, try_number, filename_template, return the rendered log + filename :param ti: task instance :param try_number: try_number of the task - :param filename_template: filename template, which can be jinja template or python string template + :param filename_template: filename template, which can be jinja template or + python string template """ filename_template, filename_jinja_template = parse_template_string(filename_template) if filename_jinja_template: diff --git a/docs/code.rst b/docs/code.rst index d290398f3f4cd..6b07c3a9218ba 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -135,13 +135,13 @@ Operators .. autoclass:: airflow.contrib.operators.awsbatch_operator.AWSBatchOperator .. autoclass:: airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator -.. 
autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator +.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator .. autoclass:: airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator +.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator -.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator .. autoclass:: airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator .. autoclass:: airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator @@ -149,20 +149,20 @@ Operators .. autoclass:: airflow.contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator .. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator -.. autoclass:: airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator .. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator +.. autoclass:: airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPigOperator -.. 
autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHiveOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHiveOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPigOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateBaseOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator .. autoclass:: airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator .. autoclass:: airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator .. autoclass:: airflow.contrib.operators.discord_webhook_operator.DiscordWebhookOperator @@ -173,9 +173,40 @@ Operators .. autoclass:: airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator .. autoclass:: airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator +.. 
autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceBaseOperator +.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator +.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator +.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator +.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator +.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator +.. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlBaseOperator +.. 
autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator +.. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator +.. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator .. autoclass:: airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator .. autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator @@ -191,18 +222,18 @@ Operators .. autoclass:: airflow.contrib.operators.kubernetes_pod_operator.KubernetesPodOperator .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineModelOperator -.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator +.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator .. autoclass:: airflow.contrib.operators.mongo_to_s3.MongoToS3Operator .. 
autoclass:: airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransfer .. autoclass:: airflow.contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer .. autoclass:: airflow.contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator -.. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicCreateOperator -.. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicDeleteOperator +.. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubPublishOperator .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubSubscriptionCreateOperator .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubSubscriptionDeleteOperator -.. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubPublishOperator +.. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicCreateOperator +.. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicDeleteOperator .. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator .. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator .. autoclass:: airflow.contrib.operators.qubole_operator.QuboleOperator @@ -212,8 +243,8 @@ Operators .. autoclass:: airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.s3_to_gcs_transfer_operator.S3ToGoogleCloudStorageTransferOperator .. autoclass:: airflow.contrib.operators.sagemaker_base_operator.SageMakerBaseOperator -.. autoclass:: airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator .. autoclass:: airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator +.. autoclass:: airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator .. 
autoclass:: airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator .. autoclass:: airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator .. autoclass:: airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator diff --git a/docs/conf.py b/docs/conf.py index 0b5e69f159b18..0a9d72d951a94 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -38,19 +38,20 @@ import airflow MOCK_MODULES = [ + 'google.auth.default', + 'google.oauth2.service_account', + 'google_auth_httplib2', 'googleapiclient', - 'googleapiclient.errors', 'googleapiclient.discovery', + 'googleapiclient.errors', 'googleapiclient.http', 'mesos', 'mesos.interface', 'mesos.native', - 'google.auth.default', - 'google_auth_httplib2', - 'google.oauth2.service_account', 'pandas.io.gbq', + 'pymssql', + 'simple_salesforce', 'vertica_python', - 'pymssql' ] for mod_name in MOCK_MODULES: sys.modules[mod_name] = mock.Mock() diff --git a/docs/howto/operator.rst b/docs/howto/operator.rst index 733ed393b2df6..59656beb4302c 100644 --- a/docs/howto/operator.rst +++ b/docs/howto/operator.rst @@ -1363,8 +1363,6 @@ as shown in the example: :start-after: [START howto_operator_cloudsql_import_gcs_permissions] :end-before: [END howto_operator_cloudsql_import_gcs_permissions] -.. _CloudSqlInstanceCreateOperator: - CloudSqlInstanceCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1420,8 +1418,6 @@ More information See `Google Cloud SQL API documentation for insert `_. -.. _CloudSqlInstancePatchOperator: - CloudSqlInstancePatchOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/howto/secure-connections.rst b/docs/howto/secure-connections.rst index a2ca46024a066..14590fa4b74d8 100644 --- a/docs/howto/secure-connections.rst +++ b/docs/howto/secure-connections.rst @@ -37,14 +37,14 @@ You can still enable encryption for passwords within connections by following be print(fernet_key.decode()) # your fernet_key, keep it in secured place! 3. 
Replace ``airflow.cfg`` fernet_key value with the one from step 2. -Alternatively, you can store your fernet_key in OS environment variable. You -do not need to change ``airflow.cfg`` in this case as Airflow will use environment -variable over the value in ``airflow.cfg``: + Alternatively, you can store your fernet_key in OS environment variable. You + do not need to change ``airflow.cfg`` in this case as Airflow will use environment + variable over the value in ``airflow.cfg``: -.. code-block:: bash + .. code-block:: bash - # Note the double underscores - export AIRFLOW__CORE__FERNET_KEY=your_fernet_key + # Note the double underscores + export AIRFLOW__CORE__FERNET_KEY=your_fernet_key 4. Restart Airflow webserver. 5. For existing connections (the ones that you had defined before installing ``airflow[crypto]`` and creating a Fernet key), you need to open each connection in the connection admin UI, re-type the password, and save it. @@ -61,5 +61,5 @@ the new key to the ``fernet_key`` setting, run 1. Set ``fernet_key`` to ``new_fernet_key,old_fernet_key``. 2. Run ``airflow rotate_fernet_key`` to reencrypt existing credentials -with the new fernet key. + with the new fernet key. 3. Set ``fernet_key`` to ``new_fernet_key``. diff --git a/docs/integration.rst b/docs/integration.rst index bafd79f22513a..fbe4accac6332 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -121,6 +121,15 @@ WasbBlobSensor """""""""""""" .. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbBlobSensor + :noindex: + +.. _WasbDeleteBlobOperator: + +WasbDeleteBlobOperator +"""""""""""""""""""""" + +.. autoclass:: airflow.contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator + :noindex: .. _WasbPrefixSensor: @@ -128,6 +137,7 @@ WasbPrefixSensor """""""""""""""" .. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor + :noindex: .. _FileToWasbOperator: @@ -135,6 +145,7 @@ FileToWasbOperator """""""""""""""""" .. 
autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator + :noindex: .. _WasbHook: @@ -142,6 +153,7 @@ WasbHook """""""" .. autoclass:: airflow.contrib.hooks.wasb_hook.WasbHook + :noindex: Azure File Share '''''''''''''''' @@ -155,6 +167,7 @@ AzureFileShareHook """""""""""""""""" .. autoclass:: airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook + :noindex: Logging ''''''' @@ -180,16 +193,19 @@ AzureCosmosDBHook """"""""""""""""" .. autoclass:: airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook + :noindex: AzureCosmosInsertDocumentOperator """"""""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator + :noindex: AzureCosmosDocumentSensor """"""""""""""""""""""""" .. autoclass:: airflow.contrib.sensors.azure_cosmos_sensor.AzureCosmosDocumentSensor + :noindex: Azure Data Lake ''''''''''''''' @@ -197,7 +213,7 @@ Azure Data Lake AzureDataLakeHook communicates via a REST API compatible with WebHDFS. Make sure that a Airflow connection of type `azure_data_lake` exists. Authorization can be done by supplying a login (=Client ID), password (=Client Secret) and extra fields tenant (Tenant) and account_name (Account Name) - (see connection `azure_data_lake_default` for an example). +(see connection `azure_data_lake_default` for an example). - :ref:`AzureDataLakeHook`: Interface with Azure Data Lake. - :ref:`AzureDataLakeStorageListOperator`: Lists the files located in a specified Azure Data Lake path. @@ -209,6 +225,7 @@ AzureDataLakeHook """"""""""""""""" .. autoclass:: airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook + :noindex: .. _AzureDataLakeStorageListOperator: @@ -216,6 +233,7 @@ AzureDataLakeStorageListOperator """""""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator + :noindex: .. 
_AdlsToGoogleCloudStorageOperator: @@ -223,6 +241,7 @@ AdlsToGoogleCloudStorageOperator """""""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator + :noindex: Azure Container Instances ''''''''''''''''''''''''' @@ -246,6 +265,7 @@ AzureContainerInstancesOperator """"""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator + :noindex: .. _AzureContainerInstanceHook: @@ -253,6 +273,7 @@ AzureContainerInstanceHook """""""""""""""""""""""""" .. autoclass:: airflow.contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook + :noindex: .. _AzureContainerRegistryHook: @@ -260,6 +281,7 @@ AzureContainerRegistryHook """""""""""""""""""""""""" .. autoclass:: airflow.contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook + :noindex: .. _AzureContainerVolumeHook: @@ -267,6 +289,7 @@ AzureContainerVolumeHook """""""""""""""""""""""" .. autoclass:: airflow.contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook + :noindex: .. _AWS: @@ -291,6 +314,7 @@ EmrAddStepsOperator """"""""""""""""""" .. autoclass:: airflow.contrib.operators.emr_add_steps_operator.EmrAddStepsOperator + :noindex: .. _EmrCreateJobFlowOperator: @@ -298,6 +322,7 @@ EmrCreateJobFlowOperator """""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator + :noindex: .. _EmrTerminateJobFlowOperator: @@ -305,6 +330,7 @@ EmrTerminateJobFlowOperator """"""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator + :noindex: .. _EmrHook: @@ -312,6 +338,7 @@ EmrHook """"""" .. autoclass:: airflow.contrib.hooks.emr_hook.EmrHook + :noindex: AWS S3 @@ -330,6 +357,7 @@ S3Hook """""" .. autoclass:: airflow.hooks.S3_hook.S3Hook + :noindex: .. 
_S3FileTransformOperator: @@ -337,6 +365,7 @@ S3FileTransformOperator """"""""""""""""""""""" .. autoclass:: airflow.operators.s3_file_transform_operator.S3FileTransformOperator + :noindex: .. _S3ListOperator: @@ -344,6 +373,7 @@ S3ListOperator """""""""""""" .. autoclass:: airflow.contrib.operators.s3_list_operator.S3ListOperator + :noindex: .. _S3ToGoogleCloudStorageOperator: @@ -351,13 +381,15 @@ S3ToGoogleCloudStorageOperator """""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator + :noindex: .. _S3ToGoogleCloudStorageTransferOperator: S3ToGoogleCloudStorageTransferOperator """""""""""""""""""""""""""""""""""""" -.. autoclass:: airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageTransferOperator +.. autoclass:: airflow.contrib.operators.s3_to_gcs_transfer_operator.S3ToGoogleCloudStorageTransferOperator + :noindex: .. _S3ToHiveTransfer: @@ -365,6 +397,7 @@ S3ToHiveTransfer """""""""""""""" .. autoclass:: airflow.operators.s3_to_hive_operator.S3ToHiveTransfer + :noindex: AWS EC2 Container Service @@ -378,6 +411,7 @@ ECSOperator """"""""""" .. autoclass:: airflow.contrib.operators.ecs_operator.ECSOperator + :noindex: AWS Batch Service @@ -391,6 +425,7 @@ AWSBatchOperator """""""""""""""" .. autoclass:: airflow.contrib.operators.awsbatch_operator.AWSBatchOperator + :noindex: AWS RedShift @@ -407,6 +442,7 @@ AwsRedshiftClusterSensor """""""""""""""""""""""" .. autoclass:: airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor + :noindex: .. _RedshiftHook: @@ -414,6 +450,7 @@ RedshiftHook """""""""""" .. autoclass:: airflow.contrib.hooks.redshift_hook.RedshiftHook + :noindex: .. _RedshiftToS3Transfer: @@ -421,6 +458,7 @@ RedshiftToS3Transfer """""""""""""""""""" .. autoclass:: airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer + :noindex: .. _S3ToRedshiftTransfer: @@ -428,6 +466,7 @@ S3ToRedshiftTransfer """""""""""""""""""" .. 
autoclass:: airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer + :noindex: AWS DynamoDB '''''''''''' @@ -441,6 +480,7 @@ HiveToDynamoDBTransferOperator """""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator + :noindex: .. _AwsDynamoDBHook: @@ -448,6 +488,7 @@ AwsDynamoDBHook """"""""""""""" .. autoclass:: airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook + :noindex: AWS Lambda '''''''''' @@ -460,6 +501,7 @@ AwsLambdaHook """"""""""""" .. autoclass:: airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook + :noindex: AWS Kinesis ''''''''''' @@ -472,6 +514,7 @@ AwsFirehoseHook """"""""""""""" .. autoclass:: airflow.contrib.hooks.aws_firehose_hook.AwsFirehoseHook + :noindex: Amazon SageMaker '''''''''''''''' @@ -494,6 +537,7 @@ SageMakerHook """"""""""""" .. autoclass:: airflow.contrib.hooks.sagemaker_hook.SageMakerHook + :noindex: .. _SageMakerTrainingOperator: @@ -501,6 +545,7 @@ SageMakerTrainingOperator """"""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator + :noindex: .. _SageMakerTuningOperator: @@ -508,6 +553,7 @@ SageMakerTuningOperator """"""""""""""""""""""" .. autoclass:: airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator + :noindex: .. _SageMakerModelOperator: @@ -515,6 +561,7 @@ SageMakerModelOperator """""""""""""""""""""" .. autoclass:: airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator + :noindex: .. _SageMakerTransformOperator: @@ -522,6 +569,7 @@ SageMakerTransformOperator """""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator + :noindex: .. _SageMakerEndpointConfigOperator: @@ -529,6 +577,7 @@ SageMakerEndpointConfigOperator """"""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator + :noindex: .. 
_SageMakerEndpointOperator: @@ -536,6 +585,7 @@ SageMakerEndpointOperator """"""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator + :noindex: .. _Databricks: @@ -550,7 +600,7 @@ DatabricksSubmitRunOperator ''''''''''''''''''''''''''' .. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator - + :noindex: .. _GCP: @@ -575,6 +625,7 @@ GoogleCloudBaseHook ''''''''''''''''''' .. autoclass:: airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook + :noindex: .. _GoogleCloudBaseHook: @@ -604,6 +655,7 @@ BigQueryCheckOperator ^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator + :noindex: .. _BigQueryValueCheckOperator: @@ -611,6 +663,7 @@ BigQueryValueCheckOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator + :noindex: .. _BigQueryIntervalCheckOperator: @@ -618,6 +671,7 @@ BigQueryIntervalCheckOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator + :noindex: .. _BigQueryGetDataOperator: @@ -625,6 +679,7 @@ BigQueryGetDataOperator ^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator + :noindex: .. _BigQueryCreateEmptyTableOperator: @@ -632,6 +687,7 @@ BigQueryCreateEmptyTableOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator + :noindex: .. _BigQueryCreateExternalTableOperator: @@ -639,6 +695,7 @@ BigQueryCreateExternalTableOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator + :noindex: .. _BigQueryCreateEmptyDatasetOperator: @@ -646,6 +703,7 @@ BigQueryCreateEmptyDatasetOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. 
autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator + :noindex: .. _BigQueryDeleteDatasetOperator: @@ -653,6 +711,7 @@ BigQueryDeleteDatasetOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator + :noindex: .. _BigQueryTableDeleteOperator: @@ -660,6 +719,7 @@ BigQueryTableDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator + :noindex: .. _BigQueryOperator: @@ -667,6 +727,7 @@ BigQueryOperator ^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator + :noindex: .. _BigQueryToBigQueryOperator: @@ -674,6 +735,7 @@ BigQueryToBigQueryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator + :noindex: .. _BigQueryToCloudStorageOperator: @@ -681,6 +743,7 @@ BigQueryToCloudStorageOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator + :noindex: BigQueryHook @@ -688,6 +751,7 @@ BigQueryHook .. autoclass:: airflow.contrib.hooks.bigquery_hook.BigQueryHook :members: + :noindex: Cloud Spanner ''''''''''''' @@ -713,6 +777,7 @@ CloudSpannerInstanceDatabaseDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator + :noindex: .. _CloudSpannerInstanceDatabaseDeployOperator: @@ -720,6 +785,7 @@ CloudSpannerInstanceDatabaseDeployOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator + :noindex: .. _CloudSpannerInstanceDatabaseUpdateOperator: @@ -727,6 +793,7 @@ CloudSpannerInstanceDatabaseUpdateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. 
autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator + :noindex: .. _CloudSpannerInstanceDatabaseQueryOperator: @@ -734,6 +801,7 @@ CloudSpannerInstanceDatabaseQueryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator + :noindex: .. _CloudSpannerInstanceDeployOperator: @@ -741,6 +809,7 @@ CloudSpannerInstanceDeployOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator + :noindex: .. _CloudSpannerInstanceDeleteOperator: @@ -748,6 +817,7 @@ CloudSpannerInstanceDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator + :noindex: CloudSpannerHook @@ -755,6 +825,7 @@ CloudSpannerHook .. autoclass:: airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook :members: + :noindex: Cloud SQL @@ -782,6 +853,7 @@ CloudSqlInstanceDatabaseDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator + :noindex: .. _CloudSqlInstanceDatabaseCreateOperator: @@ -789,6 +861,7 @@ CloudSqlInstanceDatabaseCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator + :noindex: .. _CloudSqlInstanceDatabasePatchOperator: @@ -796,6 +869,7 @@ CloudSqlInstanceDatabasePatchOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator + :noindex: .. _CloudSqlInstanceDeleteOperator: @@ -803,6 +877,7 @@ CloudSqlInstanceDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator + :noindex: .. 
_CloudSqlInstanceExportOperator: @@ -810,6 +885,7 @@ CloudSqlInstanceExportOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator + :noindex: .. _CloudSqlInstanceImportOperator: @@ -817,6 +893,7 @@ CloudSqlInstanceImportOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator + :noindex: .. _CloudSqlInstanceCreateOperator: @@ -824,6 +901,7 @@ CloudSqlInstanceCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator + :noindex: .. _CloudSqlInstancePatchOperator: @@ -831,12 +909,15 @@ CloudSqlInstancePatchOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator + :noindex: .. _CloudSqlQueryOperator: CloudSqlQueryOperator ^^^^^^^^^^^^^^^^^^^^^ + .. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator + :noindex: Cloud SQL Hooks """"""""""""""" @@ -845,16 +926,19 @@ Cloud SQL Hooks .. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook :members: + :noindex: .. _CloudSqlDatabaseHook: .. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook :members: + :noindex: .. _CloudSqlProxyRunner: .. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlProxyRunner :members: + :noindex: Cloud Bigtable @@ -873,9 +957,10 @@ Cloud Bigtable Operators .. _BigtableInstanceCreateOperator: BigtableInstanceCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^" .. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator + :noindex: .. _BigtableInstanceDeleteOperator: @@ -883,6 +968,7 @@ BigtableInstanceDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator + :noindex: .. 
_BigtableClusterUpdateOperator: @@ -890,6 +976,7 @@ BigtableClusterUpdateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator + :noindex: .. _BigtableTableCreateOperator: @@ -897,6 +984,7 @@ BigtableTableCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator + :noindex: .. _BigtableTableDeleteOperator: @@ -904,6 +992,7 @@ BigtableTableDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator + :noindex: .. _BigtableTableWaitForReplicationSensor: @@ -911,6 +1000,7 @@ BigtableTableWaitForReplicationSensor ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor + :noindex: .. _BigtableHook: @@ -937,6 +1027,7 @@ Compute Engine Operators The operators have the common base operator: .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceBaseOperator + :noindex: They also use :ref:`GceHook` to communicate with Google Cloud Platform. @@ -946,6 +1037,7 @@ GceInstanceStartOperator ^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator + :noindex: .. _GceInstanceStopOperator: @@ -953,6 +1045,7 @@ GceInstanceStopOperator ^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator + :noindex: .. _GceSetMachineTypeOperator: @@ -960,6 +1053,7 @@ GceSetMachineTypeOperator ^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator + :noindex: .. _GceInstanceTemplateCopyOperator: @@ -967,6 +1061,7 @@ GceInstanceTemplateCopyOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator + :noindex: .. 
_GceInstanceGroupManagerUpdateTemplateOperator: @@ -974,6 +1069,7 @@ GceInstanceGroupManagerUpdateTemplateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator + :noindex: .. _GceHook: @@ -981,7 +1077,8 @@ Compute Engine Hook """"""""""""""""""" .. autoclass:: airflow.contrib.hooks.gcp_compute_hook.GceHook -:members: + :members: + :noindex: Cloud Functions @@ -993,8 +1090,6 @@ Cloud Functions Operators - :ref:`GcfFunctionDeployOperator` : deploy Google Cloud Function to Google Cloud Platform - :ref:`GcfFunctionDeleteOperator` : delete Google Cloud Function in Google Cloud Platform -.. autoclass:: airflow.contrib.operators.gcp_operator.GCP - They also use :ref:`GcfHook` to communicate with Google Cloud Platform. .. _GcfFunctionDeployOperator: @@ -1003,7 +1098,7 @@ GcfFunctionDeployOperator ^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator - + :noindex: .. _GcfFunctionDeleteOperator: @@ -1011,6 +1106,7 @@ GcfFunctionDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator + :noindex: .. _GcfHook: @@ -1019,6 +1115,7 @@ Cloud Functions Hook .. autoclass:: airflow.contrib.hooks.gcp_function_hook.GcfHook :members: + :noindex: Cloud DataFlow @@ -1037,6 +1134,7 @@ DataFlowJavaOperator ^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator + :noindex: .. code:: python @@ -1078,6 +1176,7 @@ DataflowTemplateOperator ^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator + :noindex: .. _DataFlowPythonOperator: @@ -1085,6 +1184,7 @@ DataFlowPythonOperator ^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator + :noindex: DataFlowHook @@ -1092,6 +1192,7 @@ DataFlowHook .. 
autoclass:: airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook :members: + :noindex: @@ -1119,6 +1220,7 @@ DataprocClusterCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator + :noindex: .. _DataprocClusterScaleOperator: @@ -1126,6 +1228,7 @@ DataprocClusterScaleOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator + :noindex: .. _DataprocClusterDeleteOperator: @@ -1133,6 +1236,7 @@ DataprocClusterDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator + :noindex: .. _DataProcPigOperator: @@ -1140,6 +1244,7 @@ DataProcPigOperator ^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPigOperator + :noindex: .. _DataProcHiveOperator: @@ -1147,6 +1252,7 @@ DataProcHiveOperator ^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHiveOperator + :noindex: .. _DataProcSparkSqlOperator: @@ -1154,6 +1260,7 @@ DataProcSparkSqlOperator ^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator + :noindex: .. _DataProcSparkOperator: @@ -1161,6 +1268,7 @@ DataProcSparkOperator ^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkOperator + :noindex: .. _DataProcHadoopOperator: @@ -1168,6 +1276,7 @@ DataProcHadoopOperator ^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator + :noindex: .. _DataProcPySparkOperator: @@ -1175,6 +1284,7 @@ DataProcPySparkOperator ^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator + :noindex: .. _DataprocWorkflowTemplateInstantiateOperator: @@ -1182,6 +1292,7 @@ DataprocWorkflowTemplateInstantiateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. 
autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator + :noindex: .. _DataprocWorkflowTemplateInstantiateInlineOperator: @@ -1189,6 +1300,7 @@ DataprocWorkflowTemplateInstantiateInlineOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator + :noindex: Cloud Datastore ''''''''''''''' @@ -1205,6 +1317,7 @@ DatastoreExportOperator ^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator + :noindex: .. _DatastoreImportOperator: @@ -1212,12 +1325,14 @@ DatastoreImportOperator ^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator + :noindex: DatastoreHook """"""""""""" .. autoclass:: airflow.contrib.hooks.datastore_hook.DatastoreHook :members: + :noindex: Cloud ML Engine @@ -1238,6 +1353,7 @@ MLEngineBatchPredictionOperator .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator :members: + :noindex: .. _MLEngineModelOperator: @@ -1246,6 +1362,7 @@ MLEngineModelOperator .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineModelOperator :members: + :noindex: .. _MLEngineTrainingOperator: @@ -1254,6 +1371,7 @@ MLEngineTrainingOperator .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator :members: + :noindex: .. _MLEngineVersionOperator: @@ -1262,6 +1380,7 @@ MLEngineVersionOperator .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator :members: + :noindex: Cloud ML Engine Hook """""""""""""""""""" @@ -1273,6 +1392,7 @@ MLEngineHook .. autoclass:: airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook :members: + :noindex: Cloud Storage @@ -1298,6 +1418,7 @@ FileToGoogleCloudStorageOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. 
autoclass:: airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator + :noindex: .. _GoogleCloudStorageCreateBucketOperator: @@ -1305,6 +1426,7 @@ GoogleCloudStorageBucketCreateAclEntryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator + :noindex: .. _GoogleCloudStorageBucketCreateAclEntryOperator: @@ -1312,6 +1434,7 @@ GoogleCloudStorageCreateBucketOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator + :noindex: .. _GoogleCloudStorageDownloadOperator: @@ -1319,6 +1442,7 @@ GoogleCloudStorageDownloadOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator + :noindex: .. _GoogleCloudStorageListOperator: @@ -1326,6 +1450,7 @@ GoogleCloudStorageListOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator + :noindex: .. _GoogleCloudStorageToBigQueryOperator: @@ -1333,6 +1458,7 @@ GoogleCloudStorageObjectCreateAclEntryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator + :noindex: .. _GoogleCloudStorageObjectCreateAclEntryOperator: @@ -1340,6 +1466,7 @@ GoogleCloudStorageToBigQueryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator + :noindex: .. _GoogleCloudStorageToGoogleCloudStorageOperator: @@ -1347,6 +1474,7 @@ GoogleCloudStorageToGoogleCloudStorageOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator + :noindex: .. 
_GoogleCloudStorageToGoogleCloudStorageTransferOperator: @@ -1354,6 +1482,7 @@ GoogleCloudStorageToGoogleCloudStorageTransferOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcs_to_gcs_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator + :noindex: .. _MySqlToGoogleCloudStorageOperator: @@ -1361,18 +1490,21 @@ MySqlToGoogleCloudStorageOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator + :noindex: GoogleCloudStorageHook """""""""""""""""""""" .. autoclass:: airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook :members: + :noindex: GCPTransferServiceHook """""""""""""""""""""" .. autoclass:: airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook :members: + :noindex: Google Kubernetes Engine '''''''''''''''''''''''' @@ -1387,18 +1519,24 @@ GKEClusterCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator + :noindex: + .. _GKEClusterCreateOperator: GKEClusterDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator + :noindex: + .. _GKEClusterDeleteOperator: GKEPodOperator ^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator + :noindex: + .. _GKEPodOperator: Google Kubernetes Engine Hook @@ -1406,7 +1544,7 @@ Google Kubernetes Engine Hook .. autoclass:: airflow.contrib.hooks.gcp_container_hook.GKEClusterHook :members: - + :noindex: .. _Qubole: @@ -1420,24 +1558,28 @@ QuboleOperator '''''''''''''' .. autoclass:: airflow.contrib.operators.qubole_operator.QuboleOperator + :noindex: QubolePartitionSensor ''''''''''''''''''''' .. autoclass:: airflow.contrib.sensors.qubole_sensor.QubolePartitionSensor - + :noindex: QuboleFileSensor '''''''''''''''' .. 
autoclass:: airflow.contrib.sensors.qubole_sensor.QuboleFileSensor + :noindex: QuboleCheckOperator ''''''''''''''''''' .. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator + :noindex: QuboleValueCheckOperator '''''''''''''''''''''''' .. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator + :noindex: diff --git a/docs/kubernetes.rst b/docs/kubernetes.rst index 356c5115ba0f2..94b052f9fb3d8 100644 --- a/docs/kubernetes.rst +++ b/docs/kubernetes.rst @@ -131,6 +131,10 @@ Kubernetes Operator .. autoclass:: airflow.contrib.operators.kubernetes_pod_operator.KubernetesPodOperator + :noindex: + .. autoclass:: airflow.contrib.kubernetes.secret.Secret + :noindex: + diff --git a/docs/plugins.rst b/docs/plugins.rst index e68eee3f54f15..aca36b070ea49 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -271,7 +271,6 @@ component with the pattern `airflow.{component}.{name}.{component_class_name}`. } ) -:: This will create a hook, and an operator accessible at: - `airflow.hooks.my_namespace.MyHook` diff --git a/docs/profiling.rst b/docs/profiling.rst index 85c5ecc4f3a22..1e7ba22760853 100644 --- a/docs/profiling.rst +++ b/docs/profiling.rst @@ -21,6 +21,7 @@ Data Profiling ============== .. note:: + ``Adhoc Queries`` and ``Charts`` are no longer supported in the new FAB-based webserver and UI, due to security concerns. diff --git a/docs/security.rst b/docs/security.rst index 9e267113dbc30..3e63866c08543 100644 --- a/docs/security.rst +++ b/docs/security.rst @@ -277,7 +277,7 @@ and in your DAG, when initializing the HiveOperator, specify: To use kerberos authentication, you must install Airflow with the `kerberos` extras group: -.. code-block:: base +.. code-block:: bash pip install apache-airflow[kerberos] @@ -310,7 +310,7 @@ to only members of those teams. To use GHE authentication, you must install Airflow with the `github_enterprise` extras group: -.. code-block:: base +.. 
code-block:: bash pip install apache-airflow[github_enterprise] @@ -358,7 +358,7 @@ login, separated with a comma, to only members of those domains. To use Google authentication, you must install Airflow with the `google_auth` extras group: -.. code-block:: base +.. code-block:: bash pip install apache-airflow[google_auth] From 2c7bb17435926628914571f14dad3c45bc53753b Mon Sep 17 00:00:00 2001 From: Anatoli Babenia Date: Fri, 25 Jan 2019 16:50:41 +0300 Subject: [PATCH 0006/1104] [AIRFLOW-XXX] Automatically link Jira/GH on doc's changelog page (#4587) --- docs/_templates/layout.html | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 docs/_templates/layout.html diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 0000000000000..15822dc18d039 --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,30 @@ + + +{% extends "!layout.html" %} + +{% block extrahead %} + {{ super() }} + +{% endblock %} From c2f48ed6c8b1ca1487fac7637cf1dad91afc4c2f Mon Sep 17 00:00:00 2001 From: Tao Feng Date: Fri, 25 Jan 2019 09:41:26 -0800 Subject: [PATCH 0007/1104] [AIRFLOW-3745] Fix viewer not able to view dag details (#4569) --- airflow/www/security.py | 20 ++++++++++++++------ docs/howto/add-new-role.rst | 1 + tests/www/test_views.py | 20 ++++++++++++++++++++ 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/airflow/www/security.py b/airflow/www/security.py index f9f90f17944a7..5ee2e8938d6ce 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -140,7 +140,7 @@ ROLE_CONFIGS = [ { 'role': 'Viewer', - 'perms': viewer_perms, + 'perms': viewer_perms | dag_perms, 'vms': viewer_vms | dag_vms }, { @@ -355,11 +355,6 @@ def merge_pv(perm, view_menu): if pv.permission and pv.view_menu: all_pvs.add((pv.permission.name, pv.view_menu.name)) - # create perm for global logical dag - for dag in dag_vms: - for perm in dag_perms: - merge_pv(perm, dag) - # Get all the active / paused dags and 
insert them into a set all_dags_models = session.query(models.DagModel)\ .filter(or_(models.DagModel.is_active, models.DagModel.is_paused))\ @@ -424,6 +419,7 @@ def update_admin_perm_view(self): if p not in existing_perms_vms: existing_perms_vms.add(p) admin.permissions = list(existing_perms_vms) + self.get_session.commit() def sync_roles(self): @@ -435,6 +431,8 @@ def sync_roles(self): :return: None. """ logging.info('Start syncing user roles.') + # Create global all-dag VM + self.create_perm_vm_for_all_dag() # Create default user role. for config in ROLE_CONFIGS: @@ -460,3 +458,13 @@ def sync_perm_for_dag(self, dag_id): perm_on_dag = self.find_permission_view_menu(dag_perm, dag_id) if perm_on_dag is None: self.add_permission_view_menu(dag_perm, dag_id) + + def create_perm_vm_for_all_dag(self): + """ + Create perm-vm if not exist and insert into FAB security model for all-dags. + """ + # create perm for global logical dag + for dag_vm in dag_vms: + for perm in dag_perms: + self._merge_perm(permission_name=perm, + view_menu_name=dag_vm) diff --git a/docs/howto/add-new-role.rst b/docs/howto/add-new-role.rst index 8d221031859be..60d718ed1eb77 100644 --- a/docs/howto/add-new-role.rst +++ b/docs/howto/add-new-role.rst @@ -31,3 +31,4 @@ and click ``List Roles`` in the new UI. The image shows a role which could only write to example_python_operator is created. And we could assign the given role to a new user using ``airflow users --add-role`` cli command. +Default roles(Admin, User, Viewer, Op) shiped with RBAC could view the details for every dag. 
diff --git a/tests/www/test_views.py b/tests/www/test_views.py index 0b7124f671043..ac5a3e42f3f14 100644 --- a/tests/www/test_views.py +++ b/tests/www/test_views.py @@ -33,6 +33,7 @@ from flask._compat import PY2 from parameterized import parameterized from urllib.parse import quote_plus + from werkzeug.test import Client from airflow import configuration as conf @@ -1035,6 +1036,17 @@ def login(self, username=None, password=None): role=role_user, password='test_user') + role_viewer = self.appbuilder.sm.find_role('User') + test_viewer = self.appbuilder.sm.find_user(username='test_viewer') + if not test_viewer: + self.appbuilder.sm.add_user( + username='test_viewer', + first_name='test_viewer', + last_name='test_viewer', + email='test_viewer@fab.org', + role=role_viewer, + password='test_viewer') + dag_acl_role = self.appbuilder.sm.add_role('dag_acl_tester') dag_tester = self.appbuilder.sm.find_user(username='dag_tester') if not dag_tester: @@ -1522,6 +1534,14 @@ def test_log_success_for_user(self): self.check_content_in_response('"message":', resp) self.check_content_in_response('"metadata":', resp) + def test_tree_view_for_viewer(self): + self.logout() + self.login(username='test_viewer', + password='test_viewer') + url = 'tree?dag_id=example_bash_operator' + resp = self.client.get(url, follow_redirects=True) + self.check_content_in_response('runme_1', resp) + class TestTaskInstanceView(TestBase): TI_ENDPOINT = '/taskinstance/list/?_flt_0_execution_date={}' From 34e3485b7f1785a6a3448247c49323995bd009d7 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Fri, 25 Jan 2019 20:56:57 +0000 Subject: [PATCH 0008/1104] [AIRFLOW-XXX] Mock optional modules when building docs (#4586) --- docs/conf.py | 56 +++++++++++++++++++++++++++++++++++++++++----------- setup.py | 1 - 2 files changed, 44 insertions(+), 13 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0a9d72d951a94..cf31d5e57ded5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -33,28 +33,60 @@ # serve to 
show the default. import os import sys -import mock import airflow -MOCK_MODULES = [ - 'google.auth.default', - 'google.oauth2.service_account', +autodoc_mock_imports = [ + 'MySQLdb', + 'adal', + 'analytics', + 'azure', + 'azure.cosmos', + 'azure.datalake', + 'azure.mgmt', + 'boto3', + 'botocore', + 'bson', + 'cassandra', + 'celery', + 'cloudant', + 'cx_Oracle', + 'datadog', + 'docker', + 'google', 'google_auth_httplib2', 'googleapiclient', - 'googleapiclient.discovery', - 'googleapiclient.errors', - 'googleapiclient.http', + 'hdfs', + 'httplib2', + 'jaydebeapi', + 'jenkins', + 'jira', + 'kubernetes', 'mesos', - 'mesos.interface', - 'mesos.native', - 'pandas.io.gbq', + 'msrestazure', + 'pandas', + 'pandas_gbq', + 'paramiko', + 'pinotdb', + 'psycopg2', + 'pydruid', + 'pyhive', + 'pyhive', + 'pymongo', 'pymssql', + 'pysftp', + 'qds_sdk', + 'redis', 'simple_salesforce', + 'slackclient', + 'smbclient', + 'snowflake', + 'sshtunnel', + 'tenacity', 'vertica_python', + 'winrm', + 'zdesk', ] -for mod_name in MOCK_MODULES: - sys.modules[mod_name] = mock.Mock() # Hack to allow changing for piece of the code to behave differently while # the docs are being built. 
The main objective was to alter the diff --git a/setup.py b/setup.py index e872b9cec54c0..195da0c798eef 100644 --- a/setup.py +++ b/setup.py @@ -156,7 +156,6 @@ def write_version(filename=os.path.join(*['airflow', databricks = ['requests>=2.20.0, <3'] datadog = ['datadog>=0.14.0'] doc = [ - 'mock', 'sphinx>=1.2.3', 'sphinx-argparse>=0.1.13', 'sphinx-rtd-theme>=0.1.6', From b6f207ff77ac706f08a65336cabb7140a9cd9816 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Fri, 25 Jan 2019 21:00:09 +0000 Subject: [PATCH 0009/1104] [AIRFLOW-XXX] Removes Data Profiling docs as it is not supported in RBAC UI --- docs/profiling.rst | 63 ---------------------------------------------- 1 file changed, 63 deletions(-) delete mode 100644 docs/profiling.rst diff --git a/docs/profiling.rst b/docs/profiling.rst deleted file mode 100644 index 1e7ba22760853..0000000000000 --- a/docs/profiling.rst +++ /dev/null @@ -1,63 +0,0 @@ -.. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - -.. http://www.apache.org/licenses/LICENSE-2.0 - -.. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. - -.. TODO: This section would be removed after we migrate to www_rbac completely. - -Data Profiling -============== - -.. note:: - - ``Adhoc Queries`` and ``Charts`` are no longer supported in the new FAB-based webserver - and UI, due to security concerns. 
- -Part of being productive with data is having the right weapons to -profile the data you are working with. Airflow provides a simple query -interface to write SQL and get results quickly, and a charting application -letting you visualize data. - -Adhoc Queries -------------- -The adhoc query UI allows for simple SQL interactions with the database -connections registered in Airflow. - -.. image:: img/adhoc.png - -Charts ------- -A simple UI built on top of flask-admin and highcharts allows building -data visualizations and charts easily. Fill in a form with a label, SQL, -chart type, pick a source database from your environment's connections, -select a few other options, and save it for later use. - -You can even use the same templating and macros available when writing -airflow pipelines, parameterizing your queries and modifying parameters -directly in the URL. - -These charts are basic, but they're easy to create, modify and share. - -Chart Screenshot -................ - -.. image:: img/chart.png - ------ - -Chart Form Screenshot -..................... - -.. 
image:: img/chart_form.png From 140101815f824cb3c67394484af9cb9b1db1f4d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20L=C3=A9one?= Date: Fri, 25 Jan 2019 22:04:05 +0100 Subject: [PATCH 0010/1104] [AIRFLOW-3764] Simplify chained comparisons in IF block (#4580) --- airflow/bin/cli.py | 2 +- airflow/contrib/utils/sendgrid.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index 999474f76b824..c15d0212b5a14 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -748,7 +748,7 @@ def wait_until_true(fn, timeout=0): """ t = time.time() while not fn(): - if 0 < timeout and timeout <= time.time() - t: + if 0 < timeout <= time.time() - t: raise AirflowWebServerTimeout( "No response from gunicorn master within {0} seconds" .format(timeout)) diff --git a/airflow/contrib/utils/sendgrid.py b/airflow/contrib/utils/sendgrid.py index 1b932bcb9bdea..cc0d4d841ce1c 100644 --- a/airflow/contrib/utils/sendgrid.py +++ b/airflow/contrib/utils/sendgrid.py @@ -112,7 +112,7 @@ def _post_sendgrid_mail(mail_data): sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY')) response = sg.client.mail.send.post(request_body=mail_data) # 2xx status code. 
- if response.status_code >= 200 and response.status_code < 300: + if 200 <= response.status_code < 300: log.info('Email with subject %s is successfully sent to recipients: %s', mail_data['subject'], mail_data['personalizations']) else: From 31e1878ecd63f6438fc426bb73feeaa1e427c1bf Mon Sep 17 00:00:00 2001 From: yangaws <31293788+yangaws@users.noreply.github.com> Date: Fri, 25 Jan 2019 13:15:22 -0800 Subject: [PATCH 0011/1104] [AIRFLOW-3719] Handle StopIteration in CloudWatch logs retrieval (#4516) --- airflow/contrib/hooks/sagemaker_hook.py | 10 +++++++++- tests/contrib/hooks/test_sagemaker_hook.py | 8 ++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/airflow/contrib/hooks/sagemaker_hook.py b/airflow/contrib/hooks/sagemaker_hook.py index 55e62a78e238f..8ff478649de1a 100644 --- a/airflow/contrib/hooks/sagemaker_hook.py +++ b/airflow/contrib/hooks/sagemaker_hook.py @@ -304,7 +304,15 @@ def multi_stream_iter(self, log_group, streams, positions=None): positions = positions or {s: Position(timestamp=0, skip=0) for s in streams} event_iters = [self.log_stream(log_group, s, positions[s].timestamp, positions[s].skip) for s in streams] - events = [next(s) if s else None for s in event_iters] + events = [] + for s in event_iters: + if not s: + events.append(None) + continue + try: + events.append(next(s)) + except StopIteration: + events.append(None) while any(events): i = argmin(events, lambda x: x['timestamp'] if x else 9999999999) diff --git a/tests/contrib/hooks/test_sagemaker_hook.py b/tests/contrib/hooks/test_sagemaker_hook.py index bec00bf601a8f..02dad6cb851e5 100644 --- a/tests/contrib/hooks/test_sagemaker_hook.py +++ b/tests/contrib/hooks/test_sagemaker_hook.py @@ -256,6 +256,14 @@ class TestSageMakerHook(unittest.TestCase): def setUp(self): configuration.load_test_config() + @mock.patch.object(SageMakerHook, 'log_stream') + def test_multi_stream_iter(self, mock_log_stream): + event = {'timestamp': 1} + mock_log_stream.side_effect = 
[iter([event]), iter([]), None] + hook = SageMakerHook() + event_iter = hook.multi_stream_iter('log', [None, None, None]) + self.assertEqual(next(event_iter), (0, event)) + @mock.patch.object(S3Hook, 'create_bucket') @mock.patch.object(S3Hook, 'load_file') def test_configure_s3_resources(self, mock_load_file, mock_create_bucket): From db06d4fd71a191a64e6461c03fd01be20ce1eb11 Mon Sep 17 00:00:00 2001 From: Ryan Yuan Date: Sat, 26 Jan 2019 08:18:01 +1100 Subject: [PATCH 0012/1104] [AIRFLOW-3490] Add BigQueryHook's Ability to Patch Table/View (#4299) --- airflow/contrib/hooks/bigquery_hook.py | 110 ++++++++++++++++++++++ tests/contrib/hooks/test_bigquery_hook.py | 76 +++++++++++++++ 2 files changed, 186 insertions(+) diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py index 9a847c7a6106a..e3075ba9b8573 100644 --- a/airflow/contrib/hooks/bigquery_hook.py +++ b/airflow/contrib/hooks/bigquery_hook.py @@ -505,6 +505,116 @@ def create_external_table(self, 'BigQuery job failed. Error was: {}'.format(err.content) ) + def patch_table(self, + dataset_id, + table_id, + project_id=None, + description=None, + expiration_time=None, + external_data_configuration=None, + friendly_name=None, + labels=None, + schema=None, + time_partitioning=None, + view=None, + require_partition_filter=None): + """ + Patch information in an existing table. + It only updates fileds that are provided in the request object. + + Reference: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/patch + + :param dataset_id: The dataset containing the table to be patched. + :type dataset_id: str + :param table_id: The Name of the table to be patched. + :type table_id: str + :param project_id: The project containing the table to be patched. + :type project_id: str + :param description: [Optional] A user-friendly description of this table. 
+ :type description: str + :param expiration_time: [Optional] The time when this table expires, + in milliseconds since the epoch. + :type expiration_time: int + :param external_data_configuration: [Optional] A dictionary containing + properties of a table stored outside of BigQuery. + :type external_data_configuration: dict + :param friendly_name: [Optional] A descriptive name for this table. + :type friendly_name: str + :param labels: [Optional] A dictionary containing labels associated with this table. + :type labels: dict + :param schema: [Optional] If set, the schema field list as defined here: + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema + The supported schema modifications and unsupported schema modification are listed here: + https://cloud.google.com/bigquery/docs/managing-table-schemas + :type schema: list + + **Example**: :: + + schema=[{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"}] + + :param time_partitioning: [Optional] A dictionary containing time-based partitioning + definition for the table. + :type time_partitioning: dict + :param view: [Optional] A dictionary containing definition for the view. + If set, it will patch a view instead of a table: + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#view + :type view: dict + + **Example**: :: + + view = { + "query": "SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*` LIMIT 500", + "useLegacySql": False + } + + :param require_partition_filter: [Optional] If true, queries over the this table require a + partition filter. 
If false, queries over the table + :type require_partition_filter: bool + + """ + + project_id = project_id if project_id is not None else self.project_id + + table_resource = {} + + if description is not None: + table_resource['description'] = description + if expiration_time is not None: + table_resource['expirationTime'] = expiration_time + if external_data_configuration: + table_resource['externalDataConfiguration'] = external_data_configuration + if friendly_name is not None: + table_resource['friendlyName'] = friendly_name + if labels: + table_resource['labels'] = labels + if schema: + table_resource['schema'] = {'fields': schema} + if time_partitioning: + table_resource['timePartitioning'] = time_partitioning + if view: + table_resource['view'] = view + if require_partition_filter is not None: + table_resource['requirePartitionFilter'] = require_partition_filter + + self.log.info('Patching Table %s:%s.%s', + project_id, dataset_id, table_id) + + try: + self.service.tables().patch( + projectId=project_id, + datasetId=dataset_id, + tableId=table_id, + body=table_resource).execute() + + self.log.info('Table patched successfully: %s:%s.%s', + project_id, dataset_id, table_id) + + except HttpError as err: + raise AirflowException( + 'BigQuery job failed. Error was: {}'.format(err.content) + ) + def run_query(self, sql, destination_dataset_table=None, diff --git a/tests/contrib/hooks/test_bigquery_hook.py b/tests/contrib/hooks/test_bigquery_hook.py index 8002043c755f0..9bae4abb2a642 100644 --- a/tests/contrib/hooks/test_bigquery_hook.py +++ b/tests/contrib/hooks/test_bigquery_hook.py @@ -409,6 +409,82 @@ def test_create_view(self): } method.assert_called_once_with(projectId=project_id, datasetId=dataset_id, body=body) + @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration') + def test_patch_table(self, run_with_config): + project_id = 'bq-project' + dataset_id = 'bq_dataset' + table_id = 'bq_table' + + description_patched = 'Test description.' 
+ expiration_time_patched = 2524608000000 + friendly_name_patched = 'Test friendly name.' + labels_patched = {'label1': 'test1', 'label2': 'test2'} + schema_patched = [ + {'name': 'id', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'}, + {'name': 'balance', 'type': 'FLOAT', 'mode': 'NULLABLE'}, + {'name': 'new_field', 'type': 'STRING', 'mode': 'NULLABLE'} + ] + time_partitioning_patched = { + 'expirationMs': 10000000 + } + require_partition_filter_patched = True + + mock_service = mock.Mock() + method = (mock_service.tables.return_value.patch) + cursor = hook.BigQueryBaseCursor(mock_service, project_id) + cursor.patch_table( + dataset_id, table_id, project_id, + description=description_patched, + expiration_time=expiration_time_patched, + friendly_name=friendly_name_patched, + labels=labels_patched, schema=schema_patched, + time_partitioning=time_partitioning_patched, + require_partition_filter=require_partition_filter_patched + ) + + body = { + "description": description_patched, + "expirationTime": expiration_time_patched, + "friendlyName": friendly_name_patched, + "labels": labels_patched, + "schema": { + "fields": schema_patched + }, + "timePartitioning": time_partitioning_patched, + "requirePartitionFilter": require_partition_filter_patched + } + method.assert_called_once_with( + projectId=project_id, + datasetId=dataset_id, + tableId=table_id, + body=body + ) + + @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration') + def test_patch_view(self, run_with_config): + project_id = 'bq-project' + dataset_id = 'bq_dataset' + view_id = 'bq_view' + view_patched = { + 'query': "SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*` LIMIT 500", + 'useLegacySql': False + } + + mock_service = mock.Mock() + method = (mock_service.tables.return_value.patch) + cursor = hook.BigQueryBaseCursor(mock_service, project_id) + cursor.patch_table(dataset_id, view_id, project_id, view=view_patched) + body = { 
+ 'view': view_patched + } + method.assert_called_once_with( + projectId=project_id, + datasetId=dataset_id, + tableId=view_id, + body=body + ) + class TestBigQueryCursor(unittest.TestCase): @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration') From f27b5252fe07869ebe7999207741072d24c0eced Mon Sep 17 00:00:00 2001 From: Jongyoul Lee Date: Sat, 26 Jan 2019 06:21:18 +0900 Subject: [PATCH 0013/1104] [AIRFLOW-3216] HiveServer2Hook need a password with LDAP authentication (#4057) --- airflow/hooks/hive_hooks.py | 1 + tests/hooks/test_hive_hook.py | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py index c1e9c3bf37200..4c5788b874e34 100644 --- a/airflow/hooks/hive_hooks.py +++ b/airflow/hooks/hive_hooks.py @@ -787,6 +787,7 @@ def get_conn(self, schema=None): auth=auth_mechanism, kerberos_service_name=kerberos_service_name, username=db.login or username, + password=db.password, database=schema or db.schema or 'default') def _get_results(self, hql, schema='default', fetch_size=None, hive_conf=None): diff --git a/tests/hooks/test_hive_hook.py b/tests/hooks/test_hive_hook.py index 1cac74c6cd846..22ccb28d782e3 100644 --- a/tests/hooks/test_hive_hook.py +++ b/tests/hooks/test_hive_hook.py @@ -403,6 +403,27 @@ def test_get_conn(self): hook = HiveServer2Hook() hook.get_conn() + @mock.patch('pyhive.hive.connect') + def test_get_conn_with_password(self, mock_connect): + from airflow.hooks.base_hook import CONN_ENV_PREFIX + conn_id = "conn_with_password" + conn_env = CONN_ENV_PREFIX + conn_id.upper() + conn_value = os.environ.get(conn_env) + os.environ[conn_env] = "jdbc+hive2://conn_id:conn_pass@localhost:10000/default?authMechanism=LDAP" + + HiveServer2Hook(hiveserver2_conn_id=conn_id).get_conn() + mock_connect.assert_called_with( + host='localhost', + port=10000, + auth='LDAP', + kerberos_service_name=None, + username='conn_id', + password='conn_pass', + database='default') + + if 
conn_value: + os.environ[conn_env] = conn_value + def test_get_records(self): hook = HiveServer2Hook() query = "SELECT * FROM {}".format(self.table) From aed71a794423cfac819dd576d9111dc2f527e463 Mon Sep 17 00:00:00 2001 From: Felix Date: Fri, 25 Jan 2019 22:22:43 +0100 Subject: [PATCH 0014/1104] [AIRFLOW-3602] Improve ImapHook handling of retrieving no attachments (#4475) --- airflow/contrib/hooks/imap_hook.py | 83 ++++++++++++++++++--------- tests/contrib/hooks/test_imap_hook.py | 53 +++++++++-------- 2 files changed, 83 insertions(+), 53 deletions(-) diff --git a/airflow/contrib/hooks/imap_hook.py b/airflow/contrib/hooks/imap_hook.py index f028d3dda0911..1bc19fcdac380 100644 --- a/airflow/contrib/hooks/imap_hook.py +++ b/airflow/contrib/hooks/imap_hook.py @@ -19,10 +19,10 @@ import email import imaplib -import os +import os.path import re -from airflow import LoggingMixin +from airflow import LoggingMixin, AirflowException from airflow.hooks.base_hook import BaseHook @@ -30,9 +30,7 @@ class ImapHook(BaseHook): """ This hook connects to a mail server by using the imap protocol. - :param imap_conn_id: The connection id that contains the information - used to authenticate the client. - The default value is 'imap_default'. + :param imap_conn_id: The connection id that contains the information used to authenticate the client. :type imap_conn_id: str """ @@ -55,49 +53,63 @@ def has_mail_attachment(self, name, mail_folder='INBOX', check_regex=False): :param name: The name of the attachment that will be searched for. :type name: str :param mail_folder: The mail folder where to look at. - The default value is 'INBOX'. :type mail_folder: str :param check_regex: Checks the name for a regular expression. - The default value is False. :type check_regex: bool :returns: True if there is an attachment with the given name and False if not. 
:rtype: bool """ - mail_attachments = self._retrieve_mails_attachments_by_name(name, mail_folder, + mail_attachments = self._retrieve_mails_attachments_by_name(name, + mail_folder, check_regex, latest_only=True) return len(mail_attachments) > 0 - def retrieve_mail_attachments(self, name, mail_folder='INBOX', check_regex=False, - latest_only=False): + def retrieve_mail_attachments(self, + name, + mail_folder='INBOX', + check_regex=False, + latest_only=False, + not_found_mode='raise'): """ Retrieves mail's attachments in the mail folder by its name. :param name: The name of the attachment that will be downloaded. :type name: str :param mail_folder: The mail folder where to look at. - The default value is 'INBOX'. :type mail_folder: str :param check_regex: Checks the name for a regular expression. - The default value is False. :type check_regex: bool :param latest_only: If set to True it will only retrieve the first matched attachment. - The default value is False. :type latest_only: bool + :param not_found_mode: Specify what should happen if no attachment has been found. + Supported values are 'raise', 'warn' and 'ignore'. + If it is set to 'raise' it will raise an exception, + if set to 'warn' it will only print a warning and + if set to 'ignore' it won't notify you at all. + :type not_found_mode: str :returns: a list of tuple each containing the attachment filename and its payload. 
:rtype: a list of tuple """ - mail_attachments = self._retrieve_mails_attachments_by_name(name, mail_folder, + mail_attachments = self._retrieve_mails_attachments_by_name(name, + mail_folder, check_regex, latest_only) + if not mail_attachments: + self._handle_not_found_mode(not_found_mode) + return mail_attachments - def download_mail_attachments(self, name, local_output_directory, mail_folder='INBOX', - check_regex=False, latest_only=False): + def download_mail_attachments(self, + name, + local_output_directory, + mail_folder='INBOX', + check_regex=False, + latest_only=False, + not_found_mode='raise'): """ - Downloads mail's attachments in the mail folder by its name - to the local directory. + Downloads mail's attachments in the mail folder by its name to the local directory. :param name: The name of the attachment that will be downloaded. :type name: str @@ -105,22 +117,40 @@ def download_mail_attachments(self, name, local_output_directory, mail_folder='I where the files will be downloaded to. :type local_output_directory: str :param mail_folder: The mail folder where to look at. - The default value is 'INBOX'. :type mail_folder: str :param check_regex: Checks the name for a regular expression. - The default value is False. :type check_regex: bool :param latest_only: If set to True it will only download the first matched attachment. - The default value is False. :type latest_only: bool + :param not_found_mode: Specify what should happen if no attachment has been found. + Supported values are 'raise', 'warn' and 'ignore'. + If it is set to 'raise' it will raise an exception, + if set to 'warn' it will only print a warning and + if set to 'ignore' it won't notify you at all. 
+ :type not_found_mode: str """ - mail_attachments = self._retrieve_mails_attachments_by_name(name, mail_folder, - check_regex, latest_only) + mail_attachments = self._retrieve_mails_attachments_by_name(name, + mail_folder, + check_regex, + latest_only) + + if not mail_attachments: + self._handle_not_found_mode(not_found_mode) + self._create_files(mail_attachments, local_output_directory) - def _retrieve_mails_attachments_by_name(self, name, mail_folder, check_regex, - latest_only): + def _handle_not_found_mode(self, not_found_mode): + if not_found_mode is 'raise': + raise AirflowException('No mail attachments found!') + elif not_found_mode is 'warn': + self.log.warning('No mail attachments found!') + elif not_found_mode is 'ignore': + pass # Do not notify if the attachment has not been found. + else: + self.log.error('Invalid "not_found_mode" %s', not_found_mode) + + def _retrieve_mails_attachments_by_name(self, name, mail_folder, check_regex, latest_only): all_matching_attachments = [] self.mail_client.select(mail_folder) @@ -164,6 +194,8 @@ def _create_files(self, mail_attachments, local_output_directory): self._create_file(name, payload, local_output_directory) def _is_symlink(self, name): + # IMPORTANT NOTE: os.path.islink is not working for windows symlinks + # See: https://stackoverflow.com/a/11068434 return os.path.islink(name) def _is_escaping_current_directory(self, name): @@ -210,7 +242,6 @@ def get_attachments_by_name(self, name, check_regex, find_first=False): :param check_regex: Checks the name for a regular expression. :type check_regex: bool :param find_first: If set to True it will only find the first match and then quit. - The default value is False. :type find_first: bool :returns: a list of tuples each containing name and payload where the attachments name matches the given name. 
diff --git a/tests/contrib/hooks/test_imap_hook.py b/tests/contrib/hooks/test_imap_hook.py index 034d587880217..57dbf532d51a1 100644 --- a/tests/contrib/hooks/test_imap_hook.py +++ b/tests/contrib/hooks/test_imap_hook.py @@ -22,7 +22,7 @@ from mock import Mock, patch, mock_open -from airflow import configuration +from airflow import configuration, AirflowException from airflow.contrib.hooks.imap_hook import ImapHook from airflow.models.connection import Connection from airflow.utils import db @@ -132,9 +132,7 @@ def test_retrieve_mail_attachments_not_found(self, mock_imaplib): _create_fake_imap(mock_imaplib, with_mail=True) with ImapHook() as imap_hook: - attachments_in_inbox = imap_hook.retrieve_mail_attachments('test1.txt') - - self.assertEqual(attachments_in_inbox, []) + self.assertRaises(AirflowException, imap_hook.retrieve_mail_attachments, 'test1.txt') @patch(imaplib_string) def test_retrieve_mail_attachments_with_regex_found(self, mock_imaplib): @@ -153,12 +151,10 @@ def test_retrieve_mail_attachments_with_regex_not_found(self, mock_imaplib): _create_fake_imap(mock_imaplib, with_mail=True) with ImapHook() as imap_hook: - attachments_in_inbox = imap_hook.retrieve_mail_attachments( - name=r'test_(\d+).csv', - check_regex=True - ) - - self.assertEqual(attachments_in_inbox, []) + self.assertRaises(AirflowException, + imap_hook.retrieve_mail_attachments, + name=r'test_(\d+).csv', + check_regex=True) @patch(imaplib_string) def test_retrieve_mail_attachments_latest_only(self, mock_imaplib): @@ -181,7 +177,7 @@ def test_download_mail_attachments_found(self, mock_imaplib, mock_open_method): imap_hook.download_mail_attachments('test1.csv', 'test_directory') mock_open_method.assert_called_once_with('test_directory/test1.csv', 'wb') - mock_open_method().write.assert_called_once_with(b'SWQsTmFtZQoxLEZlbGl4') + mock_open_method.return_value.write.assert_called_once_with(b'SWQsTmFtZQoxLEZlbGl4') @patch(open_string, new_callable=mock_open) @patch(imaplib_string) @@ -189,10 
+185,11 @@ def test_download_mail_attachments_not_found(self, mock_imaplib, mock_open_metho _create_fake_imap(mock_imaplib, with_mail=True) with ImapHook() as imap_hook: - imap_hook.download_mail_attachments('test1.txt', 'test_directory') + self.assertRaises(AirflowException, + imap_hook.download_mail_attachments, 'test1.txt', 'test_directory') mock_open_method.assert_not_called() - mock_open_method().write.assert_not_called() + mock_open_method.return_value.write.assert_not_called() @patch(open_string, new_callable=mock_open) @patch(imaplib_string) @@ -207,7 +204,7 @@ def test_download_mail_attachments_with_regex_found(self, mock_imaplib, mock_ope ) mock_open_method.assert_called_once_with('test_directory/test1.csv', 'wb') - mock_open_method().write.assert_called_once_with(b'SWQsTmFtZQoxLEZlbGl4') + mock_open_method.return_value.write.assert_called_once_with(b'SWQsTmFtZQoxLEZlbGl4') @patch(open_string, new_callable=mock_open) @patch(imaplib_string) @@ -215,14 +212,14 @@ def test_download_mail_attachments_with_regex_not_found(self, mock_imaplib, mock _create_fake_imap(mock_imaplib, with_mail=True) with ImapHook() as imap_hook: - imap_hook.download_mail_attachments( - name=r'test_(\d+).csv', - local_output_directory='test_directory', - check_regex=True - ) + self.assertRaises(AirflowException, + imap_hook.download_mail_attachments, + name=r'test_(\d+).csv', + local_output_directory='test_directory', + check_regex=True) mock_open_method.assert_not_called() - mock_open_method().write.assert_not_called() + mock_open_method.return_value.write.assert_not_called() @patch(open_string, new_callable=mock_open) @patch(imaplib_string) @@ -237,7 +234,7 @@ def test_download_mail_attachments_with_latest_only(self, mock_imaplib, mock_ope ) mock_open_method.assert_called_once_with('test_directory/test1.csv', 'wb') - mock_open_method().write.assert_called_once_with(b'SWQsTmFtZQoxLEZlbGl4') + mock_open_method.return_value.write.assert_called_once_with(b'SWQsTmFtZQoxLEZlbGl4') 
@patch(open_string, new_callable=mock_open) @patch(imaplib_string) @@ -246,26 +243,28 @@ def test_download_mail_attachments_with_escaping_chars(self, mock_imaplib, mock_ with ImapHook() as imap_hook: imap_hook.download_mail_attachments( - name='test1.csv', + name='../test1.csv', local_output_directory='test_directory' ) mock_open_method.assert_not_called() - mock_open_method().write.assert_not_called() + mock_open_method.return_value.write.assert_not_called() + @patch('airflow.contrib.hooks.imap_hook.os.path.islink', return_value=True) @patch(open_string, new_callable=mock_open) @patch(imaplib_string) - def test_download_mail_attachments_with_symlink(self, mock_imaplib, mock_open_method): - _create_fake_imap(mock_imaplib, with_mail=True, attachment_name='initrd.img') + def test_download_mail_attachments_with_symlink(self, mock_imaplib, mock_open_method, mock_is_symlink): + _create_fake_imap(mock_imaplib, with_mail=True, attachment_name='symlink') with ImapHook() as imap_hook: imap_hook.download_mail_attachments( - name='test1.csv', + name='symlink', local_output_directory='test_directory' ) + mock_is_symlink.assert_called_once() mock_open_method.assert_not_called() - mock_open_method().write.assert_not_called() + mock_open_method.return_value.write.assert_not_called() if __name__ == '__main__': From 40f4370324063b05bb60347efe0599582ead313c Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Sat, 26 Jan 2019 16:13:43 -0800 Subject: [PATCH 0015/1104] [AIRFLOW-2190] Fix TypeError when returning 404 (#4596) When processing HTTP response headers, gunicorn checks that the name of each header is a string. Here's the relevant gunicorn code: From gunicorn/http/wsgi.py, line 257 def process_headers(self, headers): for name, value in headers: if not isinstance(name, string_types): raise TypeError('%r is not a string' % name) In Python3, `string_types` is set to the built-in `str`. For Python 2, it's set to `basestring`. 
Again, the relevant gunicorn code: From gunicorn/six.py, line 38: if PY3: string_types = str, ... else: string_types = basestring, On Python2 the `b''` syntax returns a `str`, but in Python3 it returns `bytes`. `bytes` != `str`, so we get the following error when returning a 404 on Python3: File "/usr/local/lib/python3.6/site-packages/airflow/www/app.py", line 166, in root_app resp(b'404 Not Found', [(b'Content-Type', b'text/plain')]) File "/usr/local/lib/python3.6/site-packages/gunicorn/http/wsgi.py", line 261, in start_response self.process_headers(headers) File "/usr/local/lib/python3.6/site-packages/gunicorn/http/wsgi.py", line 268, in process_headers raise TypeError('%r is not a string' % name) TypeError: b'Content-Type' is not a string Dropping the `b` prefix in favor of the single-quote string syntax should work for both Python2 and 3, as demonstrated below: Python 3.7.2 (default, Jan 13 2019, 12:50:15) [Clang 10.0.0 (clang-1000.11.45.5)] on darwin Type "help", "copyright", "credits" or "license" for more information. >>> isinstance('foo', str) True Python 2.7.15 (default, Jan 12 2019, 21:43:48) [GCC 4.2.1 Compatible Apple LLVM 10.0.0 (clang-1000.11.45.5)] on darwin Type "help", "copyright", "credits" or "license" for more information. 
>>> isinstance('foo', basestring) True --- airflow/www/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/www/app.py b/airflow/www/app.py index 83d1b9c360f5d..2251dcedecf38 100644 --- a/airflow/www/app.py +++ b/airflow/www/app.py @@ -193,7 +193,7 @@ def shutdown_session(exception=None): def root_app(env, resp): - resp(b'404 Not Found', [(b'Content-Type', b'text/plain')]) + resp(b'404 Not Found', [('Content-Type', 'text/plain')]) return [b'Apache Airflow is not at this location'] From 2f70347bdcd3f735bfc425838cd8a898da238fab Mon Sep 17 00:00:00 2001 From: Tao Feng Date: Sat, 26 Jan 2019 22:49:58 -0800 Subject: [PATCH 0016/1104] [AIRFLOW-3771] Minor refactor securityManager (#4594) --- UPDATING.md | 18 ++++----- airflow/www/security.py | 81 ++++++++++++++++++++------------------ tests/www/test_security.py | 4 +- 3 files changed, 54 insertions(+), 49 deletions(-) diff --git a/UPDATING.md b/UPDATING.md index d8c0333fdad4d..73965a810a424 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -107,14 +107,6 @@ so you might need to update your config. `task_runner = StandardTaskRunner` -### DAG level Access Control for new RBAC UI - -Extend and enhance new Airflow RBAC UI to support DAG level ACL. Each dag now has two permissions(one for write, one for read) associated('can_dag_edit', 'can_dag_read'). -The admin will create new role, associate the dag permission with the target dag and assign that role to users. That user can only access / view the certain dags on the UI -that he has permissions on. If a new role wants to access all the dags, the admin could associate dag permissions on an artificial view(``all_dags``) with that role. - -We also provide a new cli command(``sync_perm``) to allow admin to auto sync permissions. 
- ### min_file_parsing_loop_time config option temporarily disabled @@ -154,8 +146,16 @@ airflow users --remove-role --username jondoe --role Public ## Airflow 1.10.2 +### DAG level Access Control for new RBAC UI + +Extend and enhance new Airflow RBAC UI to support DAG level ACL. Each dag now has two permissions(one for write, one for read) associated('can_dag_edit', 'can_dag_read'). +The admin will create new role, associate the dag permission with the target dag and assign that role to users. That user can only access / view the certain dags on the UI +that he has permissions on. If a new role wants to access all the dags, the admin could associate dag permissions on an artificial view(``all_dags``) with that role. + +We also provide a new cli command(``sync_perm``) to allow admin to auto sync permissions. + ### Modification to `ts_nodash` macro -`ts_nodash` previously contained TimeZone information alongwith execution date. For Example: `20150101T000000+0000`. This is not user-friendly for file or folder names which was a popular use case for `ts_nodash`. Hence this behavior has been changed and using `ts_nodash` will no longer contain TimeZone information, restoring the pre-1.10 behavior of this macro. And a new macro `ts_nodash_with_tz` has been added which can be used to get a string with execution date and timezone info without dashes. +`ts_nodash` previously contained TimeZone information alongwith execution date. For Example: `20150101T000000+0000`. This is not user-friendly for file or folder names which was a popular use case for `ts_nodash`. Hence this behavior has been changed and using `ts_nodash` will no longer contain TimeZone information, restoring the pre-1.10 behavior of this macro. And a new macro `ts_nodash_with_tz` has been added which can be used to get a string with execution date and timezone info without dashes. 
Examples: * `ts_nodash`: `20150101T000000` diff --git a/airflow/www/security.py b/airflow/www/security.py index 5ee2e8938d6ce..a81f36e1dc4e9 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -18,7 +18,6 @@ # under the License. # -import logging from flask import g from flask_appbuilder.security.sqla import models as sqla_models from flask_appbuilder.security.sqla.manager import SecurityManager @@ -27,11 +26,12 @@ from airflow import models from airflow.www.app import appbuilder from airflow.utils.db import provide_session +from airflow.utils.log.logging_mixin import LoggingMixin ########################################################################### # VIEW MENUS ########################################################################### -viewer_vms = { +VIEWER_VMS = { 'Airflow', 'DagModelView', 'Browse', @@ -53,9 +53,9 @@ 'VersionView', } -user_vms = viewer_vms +USER_VMS = VIEWER_VMS -op_vms = { +OP_VMS = { 'Admin', 'Configurations', 'ConfigurationView', @@ -73,7 +73,7 @@ # PERMISSIONS ########################################################################### -viewer_perms = { +VIEWER_PERMS = { 'menu_access', 'can_index', 'can_list', @@ -100,7 +100,7 @@ 'can_version', } -user_perms = { +USER_PERMS = { 'can_dagrun_clear', 'can_run', 'can_trigger', @@ -118,17 +118,17 @@ 'can_clear', } -op_perms = { +OP_PERMS = { 'can_conf', 'can_varimport', } # global view-menu for dag-level access -dag_vms = { +DAG_VMS = { 'all_dags' } -dag_perms = { +DAG_PERMS = { 'can_dag_read', 'can_dag_edit', } @@ -140,18 +140,18 @@ ROLE_CONFIGS = [ { 'role': 'Viewer', - 'perms': viewer_perms | dag_perms, - 'vms': viewer_vms | dag_vms + 'perms': VIEWER_PERMS | DAG_PERMS, + 'vms': VIEWER_VMS | DAG_VMS }, { 'role': 'User', - 'perms': viewer_perms | user_perms | dag_perms, - 'vms': viewer_vms | dag_vms | user_vms, + 'perms': VIEWER_PERMS | USER_PERMS | DAG_PERMS, + 'vms': VIEWER_VMS | DAG_VMS | USER_VMS, }, { 'role': 'Op', - 'perms': viewer_perms | user_perms | op_perms | 
dag_perms, - 'vms': viewer_vms | dag_vms | user_vms | op_vms, + 'perms': VIEWER_PERMS | USER_PERMS | OP_PERMS | DAG_PERMS, + 'vms': VIEWER_VMS | DAG_VMS | USER_VMS | OP_VMS, }, ] @@ -164,7 +164,7 @@ } -class AirflowSecurityManager(SecurityManager): +class AirflowSecurityManager(SecurityManager, LoggingMixin): def init_role(self, role_name, role_vms, role_perms): """ @@ -183,7 +183,7 @@ def init_role(self, role_name, role_vms, role_perms): role = self.add_role(role_name) if len(role.permissions) == 0: - logging.info('Initializing permissions for role:%s in the database.', role_name) + self.log.info('Initializing permissions for role:%s in the database.', role_name) role_pvms = [] for pvm in pvms: if pvm.view_menu.name in role_vms and pvm.permission.name in role_perms: @@ -192,11 +192,14 @@ def init_role(self, role_name, role_vms, role_perms): self.get_session.merge(role) self.get_session.commit() else: - logging.info('Existing permissions for the role:%s within the database will persist.', role_name) + self.log.info('Existing permissions for the role:%s within the database will persist.', role_name) def get_user_roles(self, user=None): """ Get all the roles associated with the user. + + :param user: the ab_user in FAB model. + :return: a list of roles associated with the user. 
""" if user is None: user = g.user @@ -227,16 +230,16 @@ def get_accessible_dag_ids(self, username=None): username = g.user if username.is_anonymous or 'Public' in username.roles: - # return an empty list if the role is public + # return an empty set if the role is public return set() roles = {role.name for role in username.roles} if {'Admin', 'Viewer', 'User', 'Op'} & roles: - return dag_vms + return DAG_VMS user_perms_views = self.get_all_permissions_views() - # return all dags that the user could access - return set([view for perm, view in user_perms_views if perm in dag_perms]) + # return a set of all dags that the user could access + return set([view for perm, view in user_perms_views if perm in DAG_PERMS]) def has_access(self, permission, view_name, user=None): """ @@ -296,7 +299,7 @@ def clean_perms(self): """ FAB leaves faulty permissions that need to be cleaned up """ - logging.info('Cleaning faulty perms') + self.log.info('Cleaning faulty perms') sesh = self.get_session pvms = ( sesh.query(sqla_models.PermissionView) @@ -308,7 +311,7 @@ def clean_perms(self): deleted_count = pvms.delete() sesh.commit() if deleted_count: - logging.info('Deleted {} faulty permissions'.format(deleted_count)) + self.log.info('Deleted {} faulty permissions'.format(deleted_count)) def _merge_perm(self, permission_name, view_menu_name): """ @@ -334,16 +337,18 @@ def _merge_perm(self, permission_name, view_menu_name): def create_custom_dag_permission_view(self, session=None): """ Workflow: - 1. when scheduler found a new dag, we will create an entry in ab_view_menu - 2. we fetch all the roles associated with dag users. - 3. we join and create all the entries for ab_permission_view_menu - (predefined permissions * dag-view_menus) - 4. Create all the missing role-permission-views for the ab_role_permission_views + 1. Fetch all the existing (permissions, view-menu) from Airflow DB. + 2. Fetch all the existing dag models that are either active or paused. Exclude the subdags. + 3. 
Create both read and write permission view-menus relation for every dags from step 2 + 4. Find out all the dag specific roles(excluded pubic, admin, viewer, op, user) + 5. Get all the permission-vm owned by the user role. + 6. Grant all the user role's permission-vm except the all-dag view-menus to the dag roles. + 7. Commit the updated permission-vm-role into db :return: None. """ # todo(Tao): should we put this function here or in scheduler loop? - logging.info('Fetching a set of all permission, view_menu from FAB meta-table') + self.log.info('Fetching a set of all permission, view_menu from FAB meta-table') def merge_pv(perm, view_menu): """Create permission view menu only if it doesn't exist""" @@ -360,8 +365,9 @@ def merge_pv(perm, view_menu): .filter(or_(models.DagModel.is_active, models.DagModel.is_paused))\ .filter(~models.DagModel.is_subdag).all() + # create can_dag_edit and can_dag_read permissions for every dag(vm) for dag in all_dags_models: - for perm in dag_perms: + for perm in DAG_PERMS: merge_pv(perm, dag.dag_id) # for all the dag-level role, add the permission of viewer @@ -372,13 +378,12 @@ def merge_pv(perm, view_menu): dag_role = [role for role in all_roles if role.name not in EXISTING_ROLES] update_perm_views = [] - # todo(tao) need to remove all_dag vm + # need to remove all_dag vm from all the existing view-menus dag_vm = self.find_view_menu('all_dags') ab_perm_view_role = sqla_models.assoc_permissionview_role perm_view = self.permissionview_model view_menu = self.viewmenu_model - # todo(tao) comment on the query all_perm_view_by_user = session.query(ab_perm_view_role)\ .join(perm_view, perm_view.id == ab_perm_view_role .columns.permission_view_id)\ @@ -430,7 +435,7 @@ def sync_roles(self): :return: None. 
""" - logging.info('Start syncing user roles.') + self.log.info('Start syncing user roles.') # Create global all-dag VM self.create_perm_vm_for_all_dag() @@ -449,12 +454,12 @@ def sync_roles(self): def sync_perm_for_dag(self, dag_id): """ Sync permissions for given dag id. The dag id surely exists in our dag bag - as only /refresh button will call this function + as only / refresh button will call this function :param dag_id: :return: """ - for dag_perm in dag_perms: + for dag_perm in DAG_PERMS: perm_on_dag = self.find_permission_view_menu(dag_perm, dag_id) if perm_on_dag is None: self.add_permission_view_menu(dag_perm, dag_id) @@ -464,7 +469,7 @@ def create_perm_vm_for_all_dag(self): Create perm-vm if not exist and insert into FAB security model for all-dags. """ # create perm for global logical dag - for dag_vm in dag_vms: - for perm in dag_perms: + for dag_vm in DAG_VMS: + for perm in DAG_PERMS: self._merge_perm(permission_name=perm, view_menu_name=dag_vm) diff --git a/tests/www/test_security.py b/tests/www/test_security.py index 7948164552581..611effbfa8d4b 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -30,7 +30,7 @@ from sqlalchemy import Column, Integer, String, Date, Float -from airflow.www.security import AirflowSecurityManager, dag_perms +from airflow.www.security import AirflowSecurityManager, DAG_PERMS logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s') @@ -176,7 +176,7 @@ def test_has_access(self, mock_has_view_access): def test_sync_perm_for_dag(self): test_dag_id = 'TEST_DAG' self.security_manager.sync_perm_for_dag(test_dag_id) - for dag_perm in dag_perms: + for dag_perm in DAG_PERMS: self.assertIsNotNone(self.security_manager. 
find_permission_view_menu(dag_perm, test_dag_id)) From 5506ef05bc587dc55ba42a96dcc3fca3df141aab Mon Sep 17 00:00:00 2001 From: Tao Feng Date: Sat, 26 Jan 2019 23:10:41 -0800 Subject: [PATCH 0017/1104] [AIRFLOW-XXX] Remove images related profiling doc (#4599) --- docs/img/adhoc.png | Bin 182773 -> 0 bytes docs/img/chart.png | Bin 169382 -> 0 bytes docs/img/chart_form.png | Bin 203224 -> 0 bytes 3 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 docs/img/adhoc.png delete mode 100644 docs/img/chart.png delete mode 100644 docs/img/chart_form.png diff --git a/docs/img/adhoc.png b/docs/img/adhoc.png deleted file mode 100644 index 77ea780624d8768e1aa74556b6c893739866b7c3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 182773 zcmZ^~19W9UvnZU2ZQDGtZB5LH&53Q>oEQ@)6Wg|J+qVDAckg@eu65@>tIt|{_pa*R zRb5@|9VRa;4hM}54Fm)PCn+JK2m}OH0R#ln1quAsqe$;24Fm)$WG*Z$FDWccC~t3L zVs2>+1SA1h{=-FCX&9rUlGHCY5INL0aVnj%{rjcAzp_4UAZe_;0VF7zhq^P^qJKEp zXm~|<)5!_+kOpFPEv55fO>u;O4IR2=@!LVrP8M(L=lelM>SMdj#oFUK_wB(Lkl70K z-i?+P456HAacB(Wf!ueQU1o0x*d+#F2|~~wU^jU!E!a0xHwO^)Jp1{$oBM2&&y5^f zWDLk}RUC7fMAV1}L~qPM!$>1&K|pmTeVSvGiAqf<`r2T~p;zM2rX>t`B@pY8-z-aN zBqI^N{r31)Tn!2=iVj49di_SMkABmOM07@qB54d+DY&Fdi9{If0Jp8+<~ED8AcAPg z{Bb~WYNJLtg!w>)Xau+Gr&gTv8=aaq_7K{nMCT)0MyCRKUrJ?6el(tvcz0R}W8X?? 
z;?4}Z7Lku|GzBrD3cg3YK)Da3NhLQ@S}3K)1_M3tw-zwM_9C&M8B32C5QkD~0xC;r zIT&=H@`Gzt9Rsx;A_Yz1Ai~RFt#LC-@x9;MmILWdF_9J>9i)^1&y26B>G$3|L@|e; zOS&Y|Lc&PLoqN%lJt#;AL~FVs$=@vU-W8})LXrG?hR^ zzkGVMDCNDR2(GPz;`=1}@~TB5C2+BW8D8iZB>_^{bHWmPLEVW2BOPj4r5c+O6bf_` zQNLXs0$os1$zY>+m5L?uT@%@N#2WN)>viHxc1qe9qDP<#4)OW~AOnF35{pf`9EZTo zEnKwO-EoefAihx&!t`x%48cZ8DBKUprvvq0Q4Gb(bYC^;sZ?TyBYv8v;9;IZrvBL= zo)5)!jqHdO$BzHVr^gCd5m45apr7$D!_{?=px}+f{~nVcGJ773iAX%(Tud$SLa)Vg zf|7THTKo&H|EizhoUuIWA=Wu_LADic_$p*%R4;!vKLS-4I+!;m#Cgw(kj5a95z7&7 zId(y!o*PU0h3H*^dzb{SJI+x!4m9o?C7Vkhym5?TIqt#(S+cMjK~0|W?I-#!;biq; z6T+V%%y8)OJ$dqv<9(?ZRFNU-m+Yjz#y+AQfL%O?94^_3uUrPs!66ikO2u>QH-!nZ#6WcFxBq^pz=?l>oK9a4??3B&m^HVGO9g@9Bs9+ zn0P4hn4MM*&Q=h44=!VntMH_?YETyXB$h$vRXE;(0rC*y+oTzO5w=(ry?AwOTw7RoT{uU*43U1-jC3ZxsRpVk zAP4?#`X6eT=mGtPC2CwrLBkAWY!I4}zx4N)#4K@CquvAYx@8RxtJ%)j9)M1JbPNb= zflaqtni#F4V!GR}2zjvO`ii#HUHNn(>ARD6Pp_#x>2)Hq`{;KTubtnpHl%=Z1g{{O zd*ZH$g+kScqlQRP`*?B_iwXaPdWnFfotBDlPs_AX9Fr#RB}AKfiN#Ci$Bt1P#*Gd| z?OE3(S&~=ABMjl+Fh|KS=W8tXqbbW%gzH9 z%BFbo-6U%@rL;#iXhfubv&xk!o`x>SlX5-AQEg&3p8Ww04aL$YxaGlwvTahiN`x!76p+3jnCXcB8! zHS&96D~rh7%!XsyHh3GocQ$yP#7X=m8Z{b#=VrTSBi0nxuFy7ZnRhNmE7m?hK7ce} zzY8}|6xk}{O72I#Ol~0^o$i=wFY_)_P-r17m%_JydL8i8{C59l;vJJ3r=FiWW0|gA zs-E2(WlK}37RRKd)C8qF%}np1~G`zZ`N1bg)=dZXrr*5mcXW)pj}&&}1H zyzANR(M{c*^yAu|+pN?g&4fkjBBxE7OVy+2W!}c67mp{j7yQfH{^?QrZtGy9L)p06 z^%9;T!E@>>)LX-I{p;=f?0e^<4FUrUBB(ML4{RMID%b_AC$bSn;@4I6(C<(L{VL8*dfQR6?cB>}GH8bhDZ`m3Wi*AZbWG zp6SRq&!Cc~kv$+GCA*rs@T#)(!=m#f<&3Y{=q8GaP02)-lYid!=f#}iEK3noA>Q|* zf};FO$xq5qG-W7v=3FK=<2y*|`!JSsFU-8%U|q0PzK$b)%QDV=H?Ix3D(eO$r|? 
z?-*i@>RKS06h(R6;t2Rj+V7hCiz5)CAELiRFHJ}%IariD3N1}W0k7&(ro2?}^dI_c zoSGc1=F!U;K@Rw1aA~bp_-yosIOw(<6g%Kui<-J!{y;>*zf^Q=hAs9QUH_iJHu8=V9mi%jnH@t#ftl_Cd4BaO^fQ_1Sutn~hQa z;19LVNxRL4_qKOqh%MinuG#DvpB6iY7rj}cO5s=Uh8MS4{fq4xjWTCdE4j@4igv#G zj`N`=5*fG-xDX$#_tAkpKE3U{3SsC9~3S-ziX3yz0#`AUmv z6l{%I(R^MP(od;MrU4UK9EUC!M{e1-;&iUs#jQtn)eZga`&X40o%q)GX65aBZY3Wz zkNd$5)mDR*pjw=6tam4=77B}WI@z}E=VCKan(P#tmYrlR^KRUa4-uQ;-lwg#TXg5v z$GqhpXpd1BKi;h$Uq75<9t;tL2DdscjGR0ea zTIu-TPs|6IpKCiV;`!@8*Y4IHo+o|4KIUhbONw@bc0@l%jVobw&fU)+T1wa}9zW9C z*u89|HnG}P-#1R*ml3E4vhK#8hMt-}64r}?C#SP79}e#yN5P=|P5=VG(I>}1U7KNi zGs7p~lfR{8_;DwX0OK@K5J$qPhJQfdHzh0J9l?HU1^yO~4%Ej2Y$-Sot&i@mIC$A7 z(50;%LSs;@7<+t@3ydk{7fpWo@~Vpk!hj9LNA>dh%J%r^v*I^#()bPg7UIG^k<}^W z4SyXBNW+@=DdvWsf5E)yJ(pJXE2x07l~8v80zxPI`~N1XNO}bX^exL=SQMY-4Xs$V$gT$3V;nO-M+{V{c@_r6?ly-{@a=yu@aXj<#I%^e!$gbS}(vHuk3U zjGUaD^bAb&OiZ+25VQ_%){gqFwAKzJ|24?}8Ars}!O-5^*3sO?n(*(q`UW;mj=aRg ze<%9CzyEqqV^{P4H2^#6D4FI1kty@FMjbs^U(ia z-t$3IvGRNZ0SN#}iU=yZeml#A@WoSU|1`B;PCK)PCQXbDq>};54=09H1jT8mI`{KQ z%*~bCYR3?zdQrCAs`X_-`i4X`16C4DOqeMEX&xJE#J+6lY?JxyiW;A-H9Z@(l^`e>cDS0D+%~rBo`$i2Br2|AY`E-FodPI6#o^ zJd(QW2*m1Zq53yqeUfhi4>gjS6BQ}7t;qT>`}j`CP~U&FGId}`vQo+*j`O^^?LT|) zoCKUA0-*nBdiRKwtO$(R-j!%OpzscT-ila%`*Uv95R%YeWVEk(dz9Fd@1CLoBHzI``6P6+DI3k*?$OFGw zVIiN3{+n-2NDSw|hSA^u!)8S4MTJXM#D+PodCTOdngfZVg=HVZNe{qB1 z4eCw>+Kw0!o=sK9SnfX92Tn!k-?Z9-z7PeWvc}pT0z{r1E6xP}z1Vu7NJrYxIZU#k zVJCzK&S@i%e`-en|K$4u@)so4ykG`L&4OaW1K}$R*kg^x?9}i-?U&df>zyxLg7|tqj#kS4pEa1i1Yt*$;1%- z_|${$sFNdqLzfo+lZO_}eteQd3Ig$>xh#9=|5`vyb?f?TI6{^!P)KCoud2FFUKtPM^RwXyEeVwek7#Ve#r#wB&k=X^qSsl+|Q;Onx`KA(*8Yx z4}>%S-YLOEf5{a7K`@sZRcP6^A(rs@R_-te(OsOs7=M+9q9fc>ddmwPl_OEpgb0C7 zI3`-n%GsmxeO&u$OT3I1b)>Nz1nmSd{AF(c_8J)l}{ z8p?6(m?j^lrM+3AKa-p@Dv_Jr`J3=#YQI@r2PlO3dcnLJO{Kx&<-O^(8(m!o>lqMZ zW?l^xzEzw1SZW)rzs#j*O_q6FzI5A#@H+R}2s^|-I>vr1_+xP`Ykg!qYabDo;64!P zG$}&4OH+=NCH{CxFYNiJ6vMLm?bVsjzEB3GScRUqwzh`NudS^OHkIN=o{?KM&!?0( zdc0SM%t&#c%pTYMfCP)nwSqh#I+iE6R+A*n)WeoGWAG=z3p63IlC-hx(M*JiVSC}L 
zjt>&jW>U!h{@{4u_OZAs4k1Nh-6>A}%Y;ltB(4JkIU&}P?y;UTGnp+?%HX4$xMbU% zEXV76?1RgYGr^LEP4?osM;bKMzf^4%HYDs2wGnF~0##@jC&Aid7l^l=)gw60y-|%i zc@AnheO0%_b+QrX{1t*={szKlil9<9#FqKg)d1;5Nx29f&@*i)fb_x{0(Jt)U5;eo=M% z(b>UQJN}@(ycSM4jrXMfaq}%fDcl{@1gk$tIgV@$sUjRZEyPmgko9^Ui%%e-&xBL} z#a#BqD(p8!g6`Ze?Drm}nC$ywjLXjEMNX#Y+$p>LRlHefT0y*7Yv}DkgZTcPAY#dB zY7xaD4Ic%l|Gjg&Y{|b&32BS_P;4s_K6DTq+q&nbt`5do$779j#i*R4n|DkI;m*D% z{~#aaVIk_T*xVq9U%Ls6-$o0AgNc@fzJ!j@e_R43`#==s+8=E6_Fl(+%-?M?`|)|u z=k+}|HOw@@*nm~YO7f}ua^r%dy|pC7oD+;Rg`si^)2QHW8$(8<=ODWOSIj@H5d%?I zMzk%5t}%&kkU9y$Y!q5#DwA3$K>hCGT$hLZX_Yr`w#4LXDAb;Q!C1~Gn1ZleH}Pr> zN|GT|O2Q+;7)K9nLCPTcU3vCvPrjzrOa#7~3jP+VY8G9!tQ#=``;!UXmx-fkt74+< z#MDQ&j(#cP$Bx#!D&r_TxcgyGB7zEponhI!#(L~&}fJva1FU;$aHu;OeA>g-A6(>-M5-BVHG%ZudAHm?Y9fCj) zJ^qA86p!>r9M~9PiwnWdP`}_*y~26dI&-r{NW&F$gNsVT`E@0Rx`s=5P%(H z4p)-#*0R!gTR4}MD61kZihT)#Vuh!ngNB*kglwsvPY7m-cAY?RXmqYHI5QnE(iV>{xODbe2y+`EG__g#|lbE zV#TKocPbmvX=PFjXp$Vlp0Ttn&#sZ8s4QIq(5Pik?O4lH*4=7mzWcH4imnw9flo_a zcgtk{)_SZ|P>)z!CRj%Gyk8|8Ip~VY!V>TnvAHFX-m0?y_Sb`PzAY@M-H)62mzD7D z%NEVPzEAeMkSp3~KX{EX*bj@W(2%BpBwRUo>X{72uw}mGjgheLE%5A!mk> z@9N;=Dg(o$?QVxNnbgLDCW2fd;rT2a5?m*NjD@L-0sp;X@Ee$zW`B)jk zC*mWG?lW{V;^_!YrRVE;W0pLUaT<;l)>0PQsO>WRc2BGZD7*RC792Wis)+bjL9d2`6mlL#+vd>GgRa<{# zqV%%`Q1oL^g~sIZa#)7Qy5pU{_x~wvzfrF-8eVI5$P4cyiQ!S?Rn^um-mVHlyDZWc zqy9x4E_(UOmlo0g)6%jIxU9taN5{V$-VxCF*+QAq-QpA;Nb}c=3+pu*85yrg?)+{_ zMVn{Ri5@e;lkLRuMJG};yR_W8YQY$Zm2M8@O z!}T-`a18Ha?Az}x>6Rjn2A%L+_n|EPisg$R3E_X+*0K0eK*~k> z53j}|yvvt-2zB4zw#%mXgS9>c&v@w2p4;=F>f$1+bK}v{AUlD(^%hK;S*Y7zZkxQ= z)LOJ5r^9$Ij6W4M-$ds=Zvs3CTwhlD=~~+)GB80q9Hs<0{v{9{e_N4oZdh|8KILYG z<}lN3ioVnCxYq;kI`Knc8xdtRSu_v0qn?r!C0d3iY~jK$rh zW&!d@XD6uHd+TM#Y9@%nSn&_Tfyh)Ix0^&NrK?L;W5(>A2>OnvlhR82nHCd!cyvw!3k!?g8O#$^ z96j2(5n(}J%{BgyYlI27inSP@x83pa@zU5)e=rz)uRDdT6@vF8aq$@Zc4#VPE_Q&i z?BcrfYWWM#@^`8-tB(rWHCAi*6%5oh2KG(Q+w0vy1OuFMt3VaqzphuY8?f<9M0Lpb zU)fXJN4Cop@+AIy*4g;&&vu#H4h%>uHi4z7H@J7|P^8(?pv#s-zi@qaL~)$zCZA@} zjt^{eVr~7U)uQB~^>tANgp5Hh9gE{ZotEEb4(66PBV0VM4#fGq&Ga0k5!FKxks;d( 
z6Bjp*-=C2#uQB`w4yvv4dTt9jJPM!*&IHGX{0ASBG_55c*UwlP*7P91_tn^aN$sZB z!*YnQa`=YYGQ+$ksWgUWN=u7Eo|pqjd@Ep$DhO8JTD4gd%?|?mU?*MruN5; z)kwQP`ELj5HXrwuKJ_NKc8weE7I!t&+torXZ#Sdri0mshO>uf^J;CP_+?PC0TLCUB z=0){6{T}Y_?pC3glwxz1Dv$^nIK)FB2s~ZK-PpX`7fri=wC{yAdi)mc5S6;D;a}_3 zn16j(nwxX(Co08?F5oj+xXF}!WPiSyYF>oj=H%pTqhvqzQ8VakoZcPJO8Sva#_-$> zsYv)W85@c!@g1ZXjs%yWx}4v+^c8zApFgbH9BRYIFtn)c{GV8&0Q9BZUS#=j9Q#ou zR-ngqKXbz4V8Q7cYu&S|=e7kT#+-*Y=r_gIq^`nME6T8R$JFs+`{mPRzV z9&bga;88N6l&4V^-y|3de(K@79}#hn#ojArzsvl5T8)$84P)zt%{D9pXiY;Y%#*h zm}E5n(P0e8P=7D2M_HR8ZuQQt!~HL{i1Fo2`nF$|3y_V zeB-(^4qswuV_I7kHAW2Vt><)-@LexvO$!kVugB@aZ4FGTu2cv>$88@?M~AW3U{TwJ zHhpW$UD35n8`JPMNN?>RkPX&$Ay|q}+CQU3S?!##jXC z$F;!~Qgc98^&mbB^pwK4CxNXf_KmG{LAJx7Z2tFSx#uxNvCab4z5JdA%9F`D3|Ou; zNo3x@wTJZ0w*jV}Y(0D23>!6(4)VA2|`-LywgPt zlIKnhb4%~J4O77{b80(Bl~U?A@F!|bRk%CY6g{J&4X!Y?%$1`_mOo^wLe=R$H=9#X$%=Ziud}pmmlOPJo6juOYyhVHf z`-l|@f0cGPX&~uHtdx7DVag})cE>0%x)8eT#%N1McRj9~%-EJ@kFFkmvCC8S)+_kl zd-Z+fewxvpg46-)OK-rZlXWX_9T|&%An-l?ZqGd|nRM_vTdMBc*Z+*ree2P=oNYAZ zhyw00b=O7*#r#PX2*6}u<^~Zr?>}B;1T0&(p05%4U*`r>VIDY~loZLPu?*v}LafBr zT0C_@@Yx>agbV;PcJ)(?Ko>mF;5PDg%Shtdxs@tfkhe>Sfn8J4KUtmSkNXTzK80V`8Rc5 z%T=>_v`CNjSvh_g=QXw>+05{c$n<5~-tW-*Q0cdm{66)JB?XbLdV=)sL^f63;F?zT zXb%lPwCupRAb;)SA-6Q-Pip*hnHT+AFaeh(WJyxn9|v!@aLz{fAqHoiG0j{wrE%1Q znr}9a1WmZ@n?VKG?n{T#!VUx80k4GbbrN<()729F z6?>2%NtU)6aO`pMIal%O+RWn&zfqjn=vd-%$-Ge?8AaHPiHw zl5`@CsoAo8#?yusgq#)4z3ylMXZ%6cC6T-fIGb z6{VBr?Ueo9&TV~!1|vUq1^|E_z>93RX}cD~@kWUx7A=R>aoq+J5_2eeThOmZJsMYw z%GH7J}djMfxY61T$*KmSI&~cTXO_5vB!C#$$CfCS+Gq7i+<9ZNSt$DTFrN87HEm> zVX^_*Eh}CA1QeLICCWNXA3L@=P&g84Q(qF43w=Vh;q|WfR4YBk% z`VyNXhKD*b8#Yp@%8%8-415JdZrP4Ak`$b)yc?w7#;2XPG3d0+&TrmsCu`HB@3OXf zL=4BX`8(-dw7(a;IwYpA{dM*kVE8Hl_wx{MZuKVpq8?0*#H7eGA zfBdl+sQy^nfr$4lVRoHZ0py$jEeX=2=;-wuVeivou1e-ap&q@QXrX+0P^yodfP8mo z8>^aY8`)0E#VLs$LGi}NlhuvhO7}}gkG!-tbn;P9LAt@Y^s83p=bnSphPwK|)0vPk zm8DzU0x1z7HdPK6YVZWqqm0zf_NqUL{y}!)=jK*fJnrZB$?tW%c&xbS8=hl(eFa<5 zs*lx84K75-?7sv07TCAZxEA1GVe&rIN@qAZc0x%2biw$JGG$kh#noO_ja<%&@MP(u 
zaeOrC>Eed^h+Y67q{bx!n6(OJk_juc5@qk`Ty+NX{cN9g%EkG;_-ym@HM%M=HNyk=p4F#y@lP&syr=Zb;vpc^Wz2?hx=whrF>aiV!v63=g zU@qPq-SPQ;=@4c|@cOdY8J#g6zG60bVW)>xedx&bcGWAx;zFe~QG@Pw>GS!@|JXdq zNYWKlhg@Oh@nqY}63pY6q85IQVIl}rRPeCoFsaA~I)GStXMa%2xV8hJT182Q$?l(c z4s*j}*C!SOxx%e8cln{?R-joo53E-?n8EGRDDq%1LmaY}y6edS(u`(!ESZ`Yb}LzO z#5s^N(=yi}mndc~0|zN5AT4iqtG4&Dk84Zh_WdjOP)?rEoZl2NGnIjf&f8+jN_P-7 zppkP|A%(i0owxR%5}KdB)K>~3^qypK4*x$@C{vd=Cp(yITgl)E@2awp?0j~4Eg;Z& zlDEnxBHODgm}D;Pkscm$ zJUDe~5=Ka<(5E!BP=!9y6k&4)$AQae>~GJD4>avO^ef8-Km#srZU`os zGk2K?qn=Z&F3qigvF*3do8(TBdwBc3`gF~<*hgB&ttHJepO4#YX~-V@KrPH4N(?x% zw-2z}BpSM9u(^2fYp|VGcyKgCzptu|s)LQo5x)ez0-dC1@TdNMr83NmCJW^y2vnB@ z6yfxO+uQXZEs_b59l9VU;oTJnn%nICmxZ0O^TjFG^E;MIkSM^_mryj#_G-vX7#nPe zUEX2$Ahh`41(Wa{Q&hNctg50ON8F}6WbW?LO@s59$j8?vamRRyC)vXE3MI zWrz8tc6g)&;NQ>oNX=bzB)A!}uqP=5joZOE8t0TV^ghp0fd1U>u_)LPyqmJx2`1Kj z~3u}F>C92R!gtt<%f76-NQxIZcvxwMyzi`2f>0_$DT61b9BC$h`mw7ZL#AN?Z za#1e@1v3^^Xx$aIERMGw%`ui8-RZ;J*~_7YE^e5j9q61Gt6~qSBKJGcN0BFoYKItA z1$fU8K0o@R(3J#BV|^6X>`Z^7NfmgvkMUF6r6EH6#RNUx>O;Wi+w^R@*PM^1$yJQo zMts?i>^&KG1t2QyXw)0e-|TUd&wz!MgjB_3&Z~fQ71N$0)Yc3B4kVOuhZ<)l0C>aMSnodinoF2!ZN?ddyaEEQmE z1Y<7eqy7iaaWS5x4O=ws&=%5FiD9Q@w~G69v$7;S@BAB+OUaJK^eYfd;iE98jt)xP zLNmLjZ-`#m0eGO==%W}uI5QmNbeVBGhG-=^8c!*x{&hxC*4ud}!L+=FnclS@0%T$K z0r^fNVWVBY-J@~qRb0zm&xRkr0_BG6NMx+N+vqeM@*=yt?w_&&I!femamWKF znhkY8f@ii|kPF|mz_i@Iy!Z&VIoNDhB&+hN&8wvPRV8q1Bv}Z3DlCl*O$ZQslb%(z z%*Po(ujPS>@hFe~P^HFm2_bvPOcM*rBSC8;VsL@KHl_oiS{0U+tc@zpb~&GNTuUTv zh~5M$&I(d_x3cIOWCvg)Q1w*R%K$dg-VZ@RB7^;nL;9?+nuAd~rz=4zh!+Qgg9s~_ z6ED!2R1laMoH;0{tqG{}u7F1QH>uAUPYWclO?GEaZgj#|NJ;Sa2p+t>oW4QHGDu`R2&_c`w&>Se!gqV#J>v@# z(2Bd-U|7c=S_QP!^>J^1YbSa#NPh)AJJP=L!z%~z3f*+N293S*VsV~*@v!Zs#fbXq zDHn#E2-6VNbfipKz%^o!0G$Lg=|(&V#W5mfKK=$rBaI@)3afF*@W$*tYxLmA&Z8MWyYX&a+mxVT z793?Dp@r2{oyjp%doUCp3y&NhhmKT?m^^4}m?Qj0t7GQ6Lf$yh>eZnB>*HIGkK5HO zRU@BnLDTfrueFPHWq>Y>4HYwx^SnOdQ0e870*vt8ZZvl^yCGZBZ+an$fDc^pt&%i-Hf(-rm(2?%^FXB^NV8Ao6aaO+5q zbEMuibbPUB*j^2cEDSU)U0h;1CTJ@-p9tfO<%BecT>+BUJqmtvK9SFD1fL^ma?U(U 
zbP92WSy#e=Gt}JUt>}U=2gFw->`Y-)Oq`iZuS<^%a8MGMd_>#|bPLJXBxDEBTo&7O zNs{eyR+-AiBZaE9x?{)c8R{5@=v=4VBxK*K220K?r6O70?Ld?!4Lb)_TlOxUv3%PW z;kt;6$zK|RAOG!`2fEyfQZ5g%>lue#$6D968_aWPi+l4~*70-w(mOc_84yVWP7K5M zR`>puY!G9QV+^i$0vJbdBR^Jj7+duHNB{tFq3#-q2c!w5$kyWR+ne-BLBGSd zYF^5Uhyb4IIHMw^Y8}Rgy^(>aj?M2X?+W)ru!r*ap|oZ(`RsdU9n}*xO?7ZpT~fl) zMBN;XOMo8pyZS1GAtB2*I6d#oZr9KNlz&+Z@H#9y_?`D)G7Hs`(PnLkSQdf^J!B1p zafL1^xTqW1SWf8kH{*^O3@VnTBC$kt?f+`B zzw+JyMn9Hyz2Yw_Rg-F&mh3$GWADvKn;1zo{IVBfjYwmOygQMmaC5n?I9a zu&F5vEP{fi-T0p*J;(0jkAa!GqqDq^iG(>{h_7v@! zQ|dq27%0c>K%dn_mmW1tV9dP?Qc&>CzoJaPROnSbwMsRjV+kgHVDHhQTlA>92eQfnV^_Lp4@enpP-DII`PYoP4ER$!yS%^+p*6-Uz-}SP$(5t=tEK-C+KqW;bXvef zf=P@78d@t2MXb%Hd=r$ogMVw*X)3dg<#Tp8!e}!x0fs?>GGtcg=%byUAfO{_g^gq2 zaA+z{B?*uh;+i-Jy_zol?3%SBLFZ}KIh&S5fFO8O19+%T zU6eUX6SJWBEd#i}B1z@_zlSmXwk5wJNl2Hsc`X{Q>b>W_vVzCYywiIO9ryF72N7)x zZFgmAmFz!KF%~<_SpBBh{XL;1XEZnOs~L;yNmf5?bZrja!W@!4ul6lhfh)`BM?3em z(JB($Z*>OOy&uRn#)85M@H~sz)HJSIJl@oq)f0cYux~%Q3d4aW#Abx7%_Q9SOfI9h zCAn9!rlv4)K2BS)>2#h`tn*rV1Fr|sNurTUYc!aWdb}K|O}1nk|tl5j}r(=WkPlZ{MmN># z$3Na34qhR1{Mf3%g%p>K2$0xZhGDQYw2i*ENrXc0fhEk_C#tAjq9BDH)epftNRrvt ze#GmKqGUkHy=^@6l>vurY$N-`>+hHePV61`Ytu?C zM`!inOQA~A-+ndLa~Tkvs%oVaZw?ar#f_6_Nejip3AZ#D+8w2tks!3DFzSDdJTVBF z&i{Jk0fsRUtudtbq=s3?2`v64vtfg8+G}Yxx<5|{w+=RrPVfzP@H0X6n1zXU?349k z2*7$kO9Qr4Tz{d&O+N(ZF{QqwFO-3z1WjqNL8{o@cpw#6YM@dvas09~+;j z>CQ_59%D8LhdlGEj36!(nVE3v-O?%A-`sV63}inyBomyVRcMcK)jQ|2#+6WymuC2O_YKgA- z_E+Ok--mT_})I$I{X;%@W74K>aNUVqLjGa|LZe|;SS z%MJ_@FJ!sa>i)2%Ic&D!Uts!q7Pol79{m%0R(UaAnx$aoo{u0w@`^WA9)ijh_6m9V z0-yeum6@eP9l+zfmwjT@xg~kTH8p>cwZ&||`wWY|`10XFb1>aAnBCpp>~5Nk%QGeF z)?adah+*wUNf;;gV&{eMEgwW#_mFiSBPNI&TpBSb3soYw@lSrJmB5gHDEVA?K1 zs;DIZL$xeeqK3{G1D{~+;F9M!O7vYDK$8w*61_BivQLk*D0_62^WFT$nT3s`I`#-V z#VJLi4+qoz+)|AumRuWpqjo#xW~0X0@m-xbx;s)c57fa}u~_G3+h^`uAk;+-ET@|^ z0%&<+W*+MT7+`;QBmjiu^GJ>c2PTdI+cZ%kK><9U zMw2}{Hs5*w*N+=0xwI0N{kWsuKe{V)86Fu0pn5hBCk6TdU$U9atNa6d_7JJ;NwF%% z3~)Z2@x2L9@4v~>2V-%JjW~L*A6m>0gsOv-j6Y5(Av6L#qBE<-mL8@<>3D=_!W8Ms+A=6o5$6y8ggU 
zc=s6&9bL_q{lG)u*~5wjedLV+yBbb^9DSb44XvjAOxgUcUwG>>V^gUOkLh@DC`EMn z44&0yF%Ea8nWDTC|2W<*E-$tW#YqfzS~OnxOUiF0GGi&Cv<`>GpRZ!i90nB&3aV`%3Em@R!pM+ybJ;zlG1Qk5#^etQZX;Y`*qz zoyIUa{o`fE;&vu4U32z0N!ebRX*u>g(<}Sg-u2!Nm=(Teh>*)HXs5YnI=}U^61NVY z&)cmI-MQ1E2cJ#I(+A7}2`bm&^Zf=4b9+m(&Mhgu!duEkXG?2;2;ru)`A%hbRw&J5 zAe7K4!Uu!R)6EW9_E*(PS=W76AbGLeN>Z_Hp?8L9!J!^d43%Jz;;VN_=Bh7hvjo_H z(yOC2N^E=P@Iem4k7_X_ddA|_Sl7m0RQVR?Kh1i{OjCg9#XTs&mqt&*m>>ixTp2VG z0@fhO9+*EUhY)-xiQ9|Va{r(L|Hr)6Ft;R1p1r?9$6gw!OjC>@eHk8Zaq-ZxM$QC1 z;dv2W{vqd^EJeqI_$RD8pvG*MC0Gp>(n?GP;3DOJ)Uf6&6^G7_g0B(y4QH2&E!x0u z3^ED{%=*TF<)UNJSFWJeApc48UP>y09~$!A@=qjb;7=@=UO9?YefYoWum6v|w{WU5 zTo=9-0qITwrMpu=>F)0C?(XjHM!LJZy9DVJq>;`=zK?sK{hh7b{SSO|W*BA-!+PS5 ztFPakwsSJt-4MiG3KNH6IW!3Bmem64$$Nv$ijB(IEGP+r^$Nl|sVW)VW5{b%D)Ml1 zn~3(J`&u+L@Pnl>r+%pE#K0Y^U{#9YkRnH*tN$eLq3_!2|9-#KUrn`Ou^NLs!4+lR zKi;f&%h1+hYl{q*$IhC7FzWw=S*W5!ZzRHyGpI_gvu6!Kbh~dPYFmObMwqm=unqEg z++GfJv>Ch5waWgS%+Rf?h4M-AYXAYMOJl8R1XlPDA?Ov^%h167y8G!Fl|(sOc-7B4 zDxzk!o_XMu=LB##+QN;m+99j4X*(Jj84Cn^Ud6W+OUm>HA6Ssm#8PuxN>2sQ1)ih+ zz;oyK={lK;l1@ncXZ~yz>Z(=&{I+(pnin1~zeE2(^V{A$TYbT|i3qT`5{~z*i~e_9 zLa((N$@&OCNdCAy@~C!wY&@E?PVv}!!=252LqR8hx&!;ZSJ3u4#^yl2GidDoLrb^2 zapgis$^yft89o%TgL9rY@%^dyCNDSrCIsI0cM@@03o9v^ATIjepYvL9C`o#CStO^} zNFT-6IKD;DoI95+pUphsav`{#IUH4df`iC}H(NqsjB|cIfi2(db1SnVL2i8)fi#}& zNQKq(!zg;?Q!F3W#Zq8^K(CqKt~QQ)nX;)S)>18V9_SlQ9Wv${8ug1;-jl^MDtGiS zcKa$@vqC;Mu|`pGc-df%Zc5Y;QO`iQBn18)_M*1RYGv268`z?dKvLu9{V)C+T2?U_ z;&p>wcF@gQjPfzfFnZ_$Lk=Bg`5n(e(FKFjJ}+CCU0)#X3K0k8qqu_$r7{nbDgwD( z6NQ|)BBJgJBfjxwyexnJlA3ZeH4OmkCXOG zsV%z%$SrbZm>*QQt4A_qwgRX6A($RUi8oKv(Ov=?3!MTRwnKPp3ufh>G`zhk7#NGk zsgwf?Uly%8K}J-HiJr%qh*G4x1Us}dSg*I$+9Y@)-}TC7jbQ}M+ZKY;Vo=&hz}N-{ zwdCC_vq5Hj8r z!yH*G%rs6c&fY0?hCZy`D|gmS%6kT|?d})nkP2d}7p@nZk7nR#4{6?xih=~RIy1^-Zr@bH9P+zacQN=J< z^h4krARHA7n)3=DI}c2_Ufj&Io{HeG-UywhFdB*#d>fH~44EW{C4hd5$2rbV&HIS~ zr)C6fSKpoEshD^&w+D}q&i-3CD(=1T)f2%vXoE7lMxID@&UM+1ghW;rqRj)gn_mLYw~m*8Q+Wsc@i0(7v%br2fL%`$7H47I}b{;l@evBBE_K%{-UQ_5GLCU>c4fHG%1; 
zrJe--Uv(it+`g|RV^l5Y>YCE&Ce`9ZnhNAM_v{!Td7FoKdfDk5^ZaV$(+(F*Cm*0Y zBld2IjiS3QuSJ{gK!e?50p@OB`~%XU)u7CTY^UT@@Wk6mRITUzv%H;(edu%PEAk~O z*2B@A^iqK)u&(x_z0aSI8XAITLeMKE%Dr9BzZ=XRdaW=PpBSsYg$=xLXwt%z)?OD_ z;vB5?UHC#)rRs8|MAs0sR!A{dA~&#a^C)g^;jJdD;8Tt&&wb9c{5t zB6X`YC|wGY7<;7JcDA*GmIwZ0F>)C@M}@CbQTh&sPc=-gKmC+n%om#v#JT7`@}aMO z&~n@+Aw2P=7xvVK!=)b!lEdG|XFnR4oj=fdfWXw>%odBWax!r!(j78%oUF$yKVr{~k2#;QU%vd-iG@-|mym0Qw`)2XMS;P1kJ@{? z?p~F-vU-w|;fJ{QN%do3@}MIr4n{I54>m)Mzeh`_M!1dWG8_x+h~RcDkt$yru7uVq zNL8pGz9?PXn zG@F>~Cr?hyg6_tH8_6<8{iZw-96a6B^<+tFyh~2NcaHag^DSio=?jbCtF;&k%KAcd zpQw1%@su#os0*hMzbz0xfO2#TvT3cuIO8aB^iE~<3~Son`UJ^e8kk{| z3{R!EQ3{XAw>MX|Ywk3=Oy4w}r0>I~l^$M)ZF58!NJ-MQY{&5*eW|j)pXRjsdcIl1lhM{mg7Ya(`fG9^MvX4E&38uTCCw3G&_>Ts9q}oVV&n zIrSK4?DzY541G1-i_VMWu=#Vr9N;?d!8ssIXB_==JR>g8`VjXl9IRWgHCJA7rm@(j z3fYd)G=1f)_c&l04OaLU3*fAV9C|qkLZ2jdQIGJhPRgKU8C|k5X;p{)usF}f*TQIV zUu;|{L@|b!(S84~E9(U-RP)RYu;h88Q^OoKdk}ioy6cGg(6Or~FIzb~hP*c=*D6e~{2FC7*tRzk zV!mn+&+>-7#C~NTOTt@SM#KvspBARpRB0V{xoMpyu$P0PdvhG9SXHCs{p$T=yPrK1 z#eOWkGDbW`&{BI@Xrgh~(@kGF{xVU`D-mw_4R=g4t_V=x7n>hH3I~xO%PyApx8$8E zw`Oy(7>EgkxS({e4zL$LI-ERl-(aS=4(SbeT{ywE5pT`Z87qb3B;vVz_f^3;zgQxj z?AMDW^5msPz?!W29#}_Q^vO8Obyt#Y4eg0IXQ1F)=oRqbXKYLFtF9c>$a3zRU1@njjPw*m zw+}xg9;RL%rMU{jhq1f6k}m_`4()S^ctCC#DQ zz+b+Hk|uYXauT|4$Fxb_%AX#bV)d;5>tIJ;^%*nyWtK`tO7_VCzdT3J7)cupf&fFC zb#nr80tK9)9hYJJ4uil>gqDns8c81c#51ym1Rm*f{BZigbKzij=d)&_()8XBg zCL4n|wviXASR{#&VF;;jWtC6YUNYF846A$*r)J%U)_z*nWGfG2DudOK@P3z#Fy|97 zp+-UTc;Q~xysDedA=^wYS#wwrh8#9aH~-II@sS-{Zht3D|zXwQ%;@24Y&x_dd4 z-mfP>4@gO>as0rNaW-Cf_JPdkQc=rJ;|xQKAT&D&{{Bw2bDxpm$ zki*Suu?Z6Yc_1UMN})vw-$0*^ zexI@BTD*%vZqXqWD3pk*j%$vo@t{iVGJ3>I?VKeS8)?ufP-;wIAEi(42aSNDX9wsL zv*+{^v))YGB@H^do)_;d8^jFmhrbne__?PxpO|4E9sk<9mnm!)&(XA#R!RFVVwFBh zR2!Lxl@holapt5kwj{d^c^gBV7R!aII8MDe zefBI_E?Qcp_n#h2wBP%UTMB%xpVh0;aG8Xu6q{tYA;))8OtKVS%PwmQj|v0YWxL;z z(E~gLG3%)yg0=XM>c+}OuFe?xNY)DWD~(5NRcu)`m_O*oIm6_j7=^e}%hEZYI}7x) zuP=Xq_#~Q9%sxVemJ$qPV;P=Vvtmjm^HPmCxZh?TR5+I{)TSzb$GIrylY`TVHw>G{ 
zH;6MP6;a;hl#KMUm2Ar`5c$}cZ!vfKNn;JxdtE98VaA+VJU$uJ9td@G>Uz^Vy808E zXKx($(^}00D@DN7F}&Q8Tq;?Oi`OMS9nRnkb=y$x<0U# zEX<;Pdi0)c{>2E7B@rlW@uAynH6ZQM6Jc`VvlhpqO?=^11>sSl@>wvOpic zzu45wwl2G-W5*;*maR9K3EftrLt9xeUQzOseZ!oi@>q{x?7pqN**I=;&o!hWjmoo{Nu^^Q3o$h)NkMei6zTfZnGK)d=7Q7B z1F_&aVHoP+SiLUb>bni~Th40adqF?wpC^THjfVt`3;z)~k_S79nu0I;Df9}~e>bx4 zrfh?98?>;8ce!zUQ!klXJ%%u0NfYWJlVR8!N3dC>&8G^!lN+Nkf7T1D?&0n_wxU(p za#w(_@|pWv*$^eMhngGb>EW?s{6c${G7o=`UBKDE2&0yd zT5m4Vdi&6%>*l%jF^Ggw?9S6I7?D;j^K*ysULntL@Ll-L^xosdk#g*y=-4NzEV+30!Tt)1qkJCx#m z7f8B|{Cp`Zi9{E}#J3c{jT=Dcr9_GzHPxZf>;8kG>YUvALA;~p6t=6P&8`-X5|;H` z)O+dEX%w9nAdb;J0SORC_K7RNKo5G=< z=GUNK;yqiW-C)BmBp;*x7g|s$s)$rNE6ps}eRW`5pZk1JqYs4t`e6y;jI0v&m9f+I z=^3kNq?P!tJzPpS>wDdtK4?Y}95lE~gr9sdthM@s)XN=6_0Calq|Bw{+ix7;!6Rjm zc)@$<11RWCPa|E|+r-<^xN;Ojldn2v)7RD6+#}On^&6=tV|$+8*vAlmNIpO1TFSoE zX{@n%$#AdLY>BpfWU#}{U#_Ufl{NcI$0R;jnJ-T^mm!K6!cOCyOy}1uum{B56lP|K z>j{ttxoJS$Tko73-&EndOm%^9YI|oD?uM9gQaey@N~rzhlFcB4Nc zMjX|CMB|z2Po;YWGrE~JXTkOme28izpH6e6UA;L+)bAvUQ;6j@^-7|%jyqiCzb2g> z`Frp`=VH=*+RL(cuf;BcuOLMbM8(=6`AdT%Aw3eoBM882k8YR z_=wFMVy&0UCebCv_=@ob*MY)lp(&hQW@5%aT3h}=6m)+@!YWpTD&TvEwfy8>r>la) zk^$MLJq;H?x3e`!9{^3ixm6NKY$k+&+<(gi`p!*C$7!%~uTP5SX}A(7&mpvTffp*# zlzns#tIC}%EPS4J)CVEoN5OQ89aN9mUH~2K*e^`dzTLK-an$kPe4F)o&U_pMpSAJ* z5|bJHkpu@-dGg`d)H!Q@ddc^ro+Q&YZctj?%-_188+^J&s$nQ7(d{{En=MyK1;rFshYevT#rvd0=fn$I-6+2L(zH*_}^83I5@BVNFael_c(4P>(v7{ zAWa%KMeVtpnmTpTHqA8wKQT~s9~2k!cS6Rdgbt4qL0OAl`GhPp48=2QB?!4P{+(JF zxajyN6>{REhqca^REBWrjZTUTB~%zp33CbycB9L5)~<`;O3%wB1Ui-2Zc5@6^az~e zrpSw0!JGjA3&5F#(LU*L7pR$}Er~9O^o{qU!Xps<>{>@W^yAYcP)u$ux;&ZMuzehd zu)c8BG#V{8!XK6TivSS#(giLQf#w$~Y%<}Cp?4_!JviK-UOtF!5DjJtf7s0{dGEt) ztI0C$^$76|&So#ZMUDtE8fl}%-P~?+M)IeDBMmjERun3?N9c;4X4VhlC~+(l#LRHe z?M)Y2wbNzdX*b9xq7hxW{yThr3P}0>VBrB@f@X_OG@Wi^opC{jD^(2gK@rsQ|E0^H z%omv`FhCCi)~{ND@&!YghovkgV$bmDm3z#N=nGLK-Z!U8Ym;I`NxrbUbT|$(s3c-X zc*mCx&;qUHeU)Pp$z>?I=)Cfp37?64FCP(r7GOr?JynUj-o4Y(%nZ(w%RtuOC9(2@ z%bH{*&YsK*`jqK;Sj-uf!F37EIC{CMW%*Yb)R)*D63Z8+M6g*~vS=9~CSroPwFgG` 
zc==SB-z8uA)T`MpD_Q8@@0!jiCjw4pVK?rmPq(gTt4^zumcra)ZPncl4jU%}4{ex~ zdp)F1)xP6lNE#?QA{LJ%9-a3}zhNHyoADGvk^tt&l-jZR^b`ZM*+YH@0OS6>%dShRHY-K(KHh`P8To?qJ|@-uf^26(nC>o?mSO@OR2nUOO-ZIR?J6km31+ ze#EW*U%p9Opiw_fTlGNdr{5J?^$zI~ClF8`Gdz9ok{p!ibnR`DR`uf1ExN@#L>cj5 z%y91uc1>03i*jW+HE2(zE*T{=3DZuh`3xQW%`WUHCP0YTH~Wg8ml%0luEl^7q;xi4 zs`t$`ouMCH}a?_BH; zkgU*YcqK+dPimf_Z5&?qE9V7-1aDiT)`x z?zyqU^2ZuhXzyK}Xau=>Y=WNls(cv=rHZKtzi&rYf)J$^cJb7F$Re<82%fGd&x z4@PN1Abf6ZO|rbOkn|HjP8h*D5!tv_+QSdrd4?VF#?8`#&!GbHjeA2Y&L*=v9`XwYluYoU9Mo%Si39M| z8vUWlAY?9at7@>}4I`SD$E!h)T-syw3+n0EqvsLUTz9#>Ruwmz&}6Oh6m;JUGaNf=W4c!H zC+@Ieh}Dbwsmh8Tj-{SYtTB=3Pu+Ql?p6zJ5a-c5hUunc(Am6V`e4!2B(DXjrB(oR zsr~0q^&xl&J5pK9q3zq9;#BQVS4s<;(JV$sho~)&-j?lm;+*w2=9#lv;$Xot%TUP@ zPqzb7;V!-Ug2?$697=o$f#>{LId5Nr$C7o3;->V_v`6MN z74Hq&4M>169lEns_95BXg5O5R6Y23Sm2)|t3P-@iJNyiIw0Ev(y{hz`$s+fm285|% z-^tgC)-Q`hJmlDVW=bfY$>?9m-W|S!tjX|K)GqiNve2BuH&ze*qG?Gp``LX38Z}9y zUffeQo_>VyzrYCh-})l!qi?-^7X5*t(QM9783{l5Wy!x<&5U+PtjOO;vo?v+OmGZu zPV+CyRp^!PrZu`=@8rda#wgwF!ooHG-T}W1hG;>oT#3SmUdn%V82@R8g^BWV@OD*PHWX=Oe{)7t1NU?V-UF_}d%sfD2*xdWjG);00t8 z37*Qb|8H`Ff~6O}MSiX~7Rjjn>vyB!RT_^s=Ej+5a+9T;TV{=z2SY zl*FkLTayNiOBVB~Vc&`VQF-y7vkhks=2;KJyGvh(q)rRY#xZ=cGj{%g#Y}-Cb1+Bp z?_(3h()qv031E$c^Kj=QQ7Taee)lhrqp}(%Br3b0toW;Up~mMk@L6u4 zVnYtfXB39eWjROAD;o9nMvCZul$w-}gLNwdOM#w}T_5#xpq%hh%7RY9SxzH}}P zwoia`s@eWirvuCW*UHZ!`o0ZoU%o2L60UXG*!@!+uir7NqcHE)-){v#W*^fLW}M4n zOg1wLQLg(Vxy?ilWaCBje6QvIKDv;ZJSDWe7?Y^N+6yqYB=)UBst~f@RTr!loP&5wHO5<`AOCH?oMT`UR~$7fcns`z){@dY?>V8R~ZKd>=>&Yo`?F+N2;X}V$} z%0DOa_uu5wJaQtgv=`{V|M{O+`M=x#?`Zn|-yr{MZT{bq`nS#U|Gy0sQ#ztnB1ibu z84Tp%b-pzK0I2cs@LDvYLP``$b8>VoZ2P8f^U>t~Lx=+vLRfYb`DJBCpuKEq;;mh> zKtA2N3052?qw#mT^%hIDK+1+s}ouIiIY8F87AE5b{U;vvkTW%r~X$ zqsg*dwtym-mY3&;G6nA;Jm*z_r2(Y<_5jM45_BQ|e~Mzjtoi`n6O(iev;wy8=Z23O z*#iCH*-p!*hKTSDdu3IfZnLt&19l~4Wo=J4D{loeTY%PT+RnQzmHWr`s~PcMgekcf zElV}JRUJ1#Q?L@>($dn%S?!E0 zD7^bSip$NCG52|>^s3z$%^T61RO2jmfSh#-(9C>^bDRHj$joE9zlst3LT_ES>SFor zh+NZiP=dW7vjMnX*+5c0Rl?}IXl!o_z@h=V-{G&fYk4V3Q2^-w|BA$ad=5dz;-fG) 
zm?`Lx;!ol7ya&){dWJL7$XFU+8NooG{p9yT5CAFKjT5JhjbUq6NKE@v|CK|6@!vsU zlXlTV6uRNLh`)5dL9BM-zt#bsr%b}fx?dulwZiQR(fh6sn23$p5_xqsH5Ixb5(}e= zwCnx6XrP(XyBy_w*}Tfq!ouKnY>%2=p_tZR=y>!h;6Y+Ao0gzCW$m?Ox#vUWE)JlB zRIVh4QdE2N3w?vw_e5na&@Nk-YJn4g)fog}7l2)h0qaj)3ycE*Ueta%(tf!=w)SqD z8BYT~EqI{1op#(t2`>Dn=KjTWC?*{q&-$FMNckXl92FJyE9FT?fGz}nFNmK!<_YjH ziqQ=F;Mv-Lb=iKtS(L-;7c8l&8dBR{cRyVNCOX4-{9OCxTKlx~!D?LoFUa;F8sD5a zzLTz(=NXRE#uY07TWwzQGQqY{j!rIKGi>YwZUyB3+3j}|Bn`r88ox750i|kIOL+`a zg@uIx4ghPVAf@Z6UnjtkfAoFae-hQZ>51uy+4#FbRT}53ZSW}>9Jj4aRt!do;F^dwE6k@m+}lhtHN*p8Zg?npK>?k;$aFi8lyqU zf4R#$;4V}S?RQ(@1@e%Ghlh;-C(nZ`nyB#tjQa)HQcS42-|+$dr_g*jCLJqN{B)jh zG&YkFfN4?-CmwAqDl#64Bas11_T|I$K~X|^LV?OPO>$%s97yY9%tfC$V5VWTT5aCl z-K~{{9J}m!`+@p1wCJZ)iQ4YBW+L89V#Fwz8aA)h-tX(#vX=q)pFyUnPfWYq5?*rc zfv}jZyM^e@wDYRQc<=PcF+2nV(m0)IQ8!FH)7&5L&g03X-;I2Kka{T~Ei>R2>;FvqfCj2h!ed zzc>Y9HQFCwTL%4kp+^p;yq}pigh~{!FkS=sb?%3nXwL{_ivs$Nd!mL<*R%2l{ggO~ zV80&y;k1i8axx7svA4XIyX;0Ot={fpgv`42ps*~7aU+@icGbS?rlBfP=%w$&^I0IB zM~=f%EU>&!0GYx2Y%h!S+zhUgGzU_!!`}f)X}ozQLm+iPasG&0&2-8z%3H8>?KaP60CZV_rz&{q36ZFR}R1c5B7D3oqQS`y42y^1g`AjsSJtAC_^0Mx^z!Oyr6( zoz%wbuE?Ble>IrvKP$XeMD~wL@%~A6KL%7eXqt7Szzd~TK${%ZO>)3;#_mtcv6YR> z4Ha_-=8TFEm`E=!KLXH{i1z~%BwY>%Gr;r&Gk6m+t`sY1dh+SD*3N!<<^1O==kS zdJwk2mI`!chWEo`{Y4o@X5^*)a`LFnO^zZqc590K_E*Jc$;Y{&3#Jd-0Q==QT*NmA z91^pt{X*=6d=y06cAWSm_se6a0b01qeY`-fJ#+4BgimMPha7y&HY99=P=<(~Umh<_ z>J7*(Z8AT9&gmb%nan28>IHmcWFC>f<7HB}%|iYce3snySjY9R{A zj0fn;HL<6i|A?k_pkNtL7z{>W8lJ z>ZB|mm$QHI4p5Ylu9dygMg|F=8L&Qe0azvNKyZE8UsPoZ`NIBBpwO5hyPSgOvWZ|; zzDXCrlu~9NI`z73Ta>t_;+Pjo3?LJKi<(k9)+3S4m5I&M0i62^0g#vAJsz$baZF7lb{RQ<*P5M>x?Hnp&{eB5JhYYk6vn3m$NaqK{ahqQ&Cjgc z3(j@ZV-X0*zb-i)uISo1CG{FLs zO*8IBn}~rmXcRUe^O4PM0y7?IHXC@2N;i>lj+7CH%XoqH(3I$OQ|ZEvyi&J(3d= z5+YX>3v6>d+KrqbNae?iF^-x4=yEzRQy@NfiFJcgiw9dEFK`s05ATL5r`Vo48e0}< zbh=LPfFY$*m{1iCXBmnXS|oV z8>W_hLGYfMaAWB$j;4l@);b6W6dW4zdPOXsv2s2EheHP*)#FlH-qOP;;$J+rHX~yq<>b0J;`zWd>kj-b>A*OZO`>mdbw)>)hdY4Zp*n6-d2kk)g-54&M0@Z$6Az 
zLeLkCcsoC5Ufe*l{wTwa$KmibtVjkX*6cutN+PtWb$&QtMuVA_y~w3(zotZ?B#tc` z_y60I|6Gm8C?Y9nxsvJ~l4fSJg_1bdog?R(AM?bkx}IB&cyr7sNc~O=0Kb)kFWWT` z!FEvctU;-miTmLv)5bHbY(Nhq+W3s~R%tc(MS}3ky;L_mloc7VO{WK95SWh zBxTM_7}8I?W*9Pq;53=YpwZhk1~R(^=nAd#H5`%WcPOfSEQjOCZ)2)#cB%8~d*8I! zjUNCj?^=1Zy#Zz(G9X0L@Zcrv%|z5 zsk;EaOehQ{N|*sT`d|}x=;)Q^NbXrvY@5_w5Nxcz6mciJ+ec2vL$nc<{QiTbuV8G- zx`J4;E}F?QB-8S05oP&O4L>A^5ro&nw*RFG{znE@gQMUF2Bg&|P%4VS1X_-;5s3Cu z1s3N!zujGzgHQ*KknOusvVrz#8jbrl$z5u!5QpK|N9!bM4 zK;pMFZc)n=N#TW}t;ZD0@k`Xmyyp=9;OW4->3Fk|j)84$WWk=(o>ae1mOb?TkQSh8 z6RC;qUYDNXRncM3C3cAh!k~MUElNNLR;o3Weuj^aVa{XWQw1>7z*&j}69 z4!4HJd8fo#(}3FWCPMaqsI`9y^jF~2zThGRX&7eC>gLm=%*I))mT9}(IT`rGS^;UL zBUQRU_^MNV>Q0JgGH6{1@t#vfXQw5Hjh3b@uDDg|Mfkk(kV z=SETwj+~K3)I#Zx2fZm2TohW+2CP?AzV@Y(%b%~Dcfc}o0u1_(kq=zJc^^aX-OEp@ z4k(SJqPd@4K8LM0Y1K$+$c5Z2U1NM>`boLF0+ag3Bln_;_@FVm-&maH57cwGUG1Mu z1hq#kUwJXfeGD$#7#nzB+-)hY-7XfMS{v*`3lY9r7*_ zjVJ9CLX&Ki&gMGbaAw!LgA7;93sv*^vGTG#4%dJ?($sXpFx|^WkzZN1T-0JbmE{zf z4&^>pJPO3-PSu2e)b>&QzL$MPp!#uG6+7R=lS+o=7nqEhI2kPHZL2A#OKz(LflZ^F ztr-h{n>}+v+4n&1%#l%U(CS-Y5{#APJFdeSO|b5SB+rI8B?+ZwF`vKMjn{`3iT<`L za5GEFYPq}(XOlO_pj9L4Vy|gq3?KK)Hu>L4#77F^X_pJZ0KEXuru!}g6lWydnLcp$ zj#|639YDVX`4qdzdn1QNNozX;YZFy!ysdCjx#RU*vS&|ThKIcedv0m3W zTDHJO`D2>&NRdv#us2%t#ye~kJpk)V>fbzMi`ZsAJyG zAmKJBVHU!T0$L|@escL7KF0vc>@$y+_rspwfuj%3)#eL3mWA2l1TmI{mE8Le38tgS zJRfcv5OXs0ezC)=40p(#tOFQji1E%Wd)dCRVMO0hwgMr_c0a$WK$+CeNtVjq=}tE{ zulkwCfano`2wEz5b0R%NVrClZP(I0TqJfjI`jzO`yyeg#DdUDLNS zu!L}uxogHd&p#3%^SeZ9M-*+KHy8#ku4t+q4&*F9ryu3za%niuDGE`$-yakynlD!C z7RKA5gTDJCdPHWs9hM+Rsc}jRM-4a87#FtdWE7Fo?1kq$%CaBi#{)C}{*5aE5aqRX z+v%9(bUTmcI3z!%|HF*IM|$JioF8-A^|D?jR*ZU)x`25h(tacvy(4^fe^g~IExGWD z*>`~!YuRUh_o(g+Sr~Vd1~e`FLGXIo<^JkmewQmLM&GYWEuZd~My)=?rjnL4Ey~Rk zhMA~By{VFbR6Y$bx?Zorf{5-0vCnh8enhefMd2QdE-*g>fsxQ5h%aglX3G&vD{L%R zfUgt4MjN{YoQYr?RQZ*^*wSz)3XiH9sC2Twp3g9AcVqOVb7f-LOFrfQ^roKG5wqT| z|KXGPxh(2@>OhiokihYB!!;S-xwB^G^gC=(0&Oo)0hM-9QHC8%`1f{|;onU-JtCww zv|Q{TK)h~$<itmGr}=9SbV+Mt~FviV%; 
zPQ&+!Sn-fk;wAN3`dt&hwNgbYh)hREEzS!#EPDJn62#JC)hpmIjJ?_~@+}$pMMW!* zgONBZ^^z$_DVxrz41w2BqLOv!mRrL_(Gl{bU+oDYCT?k8uD(eFz~L64xZKjdGJrT5 z2nv2p1Obrp(oxf*%w5kLi4k$Sj@bpyKEt65p<+}TmFn)MR;q8ev>t#n8AgJ;d7Ok9 zu`O6+mHU=QiUj%Q+g!GW^xRiLOijR^5Agin?c3@83P=v9-ZCqXq5UZ?pnVtn3Td@F zSKGdorp*qm1i|A7%{10U7)^Fw(n{{t-wmkB(Y~3Kk576Dd4z2Tb+QAy|H9GeoJM1x zkwqU!L=-fw*qz%56<%T3><(M@u)RA8CCl2^7|#VrxX#BwcsOx$Ft;a03JBY0*XxHmMS)YV(QI?F^QtLgC8&_>&G_s{s-R`>XdET^VE6SM(uRk+ zICLm-r(9HBgeX-?vGT>&#c8;zlyZv^~7#fY6dKc;4h4n6Ja^%F9z z+I!p-XOvGuyKq!~e`mRPo}Y3d^SpP2Z9t5)0yWtB^@apA`9kmau zdboy=pZGHk<+B9eS_K`_6sqsGK^6gua=mwYclerz7Ba_7)AM>RXBG+C6*yNXobszs zHkT(~;;q!(90_VIvBHK#=ZQk``&Ps4-X5j_T5EL-XL5=d!!PKWVx%4D!rn6dl zT=p4{CT{`>g>#y`L_E6>)ydn5_zeSD%K^CjZr6wCUfNia7cA!2Qe73Xo z@xqa=_osv>!rIPPOn6dfA!h1mKzaIaE>vX&h#%N_Z%HoVTE;yV=>=y9+lUj&G(qj z7hb1oQoU*?P`32-q#!`hMQWMHUGhE35~G6NiNHr6!Rh6HkK1`9Jl{kBx-&uC z^?JHra>|6ZT-&l7P{qHf-(8V2-~*WtN3|2=uToToDc@6QwmOh6U>U!iRm8TXeP^eo zxNNciV|2Cz*BHpmL5Iq(iyubZoJSt=CVFkRnNBusc$MLkc2<=x&-o=V=nvjO`k`Y4 zW?kRV__71NJ0Vr>$z{-=b#k_el|`ip{!Q-#9|c74ld4P7MuT9+ZF?Xv15z=9PUAhW z>+wpg$>)J|iRj6dQ;(J)_RH?7lnH-MW%2i8+y;{Y6Ibd2r zr-}p}rJ$Ez=>i}Gw6Wq%5VxN5wKVKa=|Qn7cur_jV~Q()scGMlj7rkE6MJ7uONXM} z1obVZ1ygOFSeLJ`zHVfHI**^mFpen5`b)e_$0&<-SWwa?`9-DkQea7$R9GAUY5ow9 z`XbKq0Bke|3gw4iXGXlQqaT=keO#E2&bH3DT&O($7V$kbvW)pC;3E{a(cAAzXd7c& z1G3@7PvC<#{1I@_Q1VM)J_BCZvEDiq4XS)X5@1J;oAwu8=U$&~Y^>6yr=A5o%aGR} z1Aac{`i1LWrm^JG{&f37`Uh7jatZ;?bXu); zmF&(63Yu53t;mOCEnq@I@0n(nSBFs4aDEw|8wWK)r6rwFP{HLUXm+ts={-ojc3wV`!a1|ygQ0nurw!G(9$?oNKiLt?id4kd>NAJ7*|H1rbe5$h#&iNglLl zkSr3e?4EZVvjHV~v?WQWltnV=sCJn5H^(cTo)0pT#H`wsiY_byWnPqur3s5dHIRP{ zwHbv#&3k^DJei+^A4wpgfvw=>I2STFJN5d zG~mDl;uK2o(I%41^1<2+Mt}HVZ=n6zx)W>SL=ff@xitI2aVCj5cFgHl)AVpz9}NCh z1NJUOB#5>+*i6>>>q^E1blpxQ4XPzyr9qBI+ZF!6t|Y4E2Xe&UhCctDZ*f7)hS$SH za%^Kp$RTWF6|$jO6Vx&Gru!V=yQbiZGIuFf&&iw+OUW-w{s8il4DOcXVr7?cGif`6 zxR-N=lnHn=thR{}m&|!Is}F8M4^qbTzelD)!YAp)p~Q2tOu zlCUq|-blv|9Dh(y9Uw;@=WPQ0GiS>`ERJxRvUdP5<(e#(a+fTCDMWAPjn@T@k=F`k 
zT&jOl9{^Y6Z^iU}x=wDvvpW`*mR>kES*|ZBJjzkBC|mp&3t+34*rs|@^1xU2*6VKb zEsb^TV3P;PPF)Mk7a^1-PAb7>a^!lH94efzX1SL%$AluxH|@wTDwETha{xxzCBV|x!I|r&g#aI!)9yhN9GWLy9GRn zuA7Rv4)@#P)4(bS#y_-G0HF^WN*EjnX*iGv_$HeDHgHCiJQniqaZC3_Tf6=c!s{z7 z+4^@i*Sqm=ep|&n_ypi#B>gZ#_%UhV`TloOyYlfu^?6wwj-_CuF6Nnz^Qt+GiRT!k ze*`>v>cM09a!G_-0cmdr7RJ(MGEFELtoTaJ^{d5VH8007ps_sRGl7HHIza}1ysQY+ z>-?Aioqi}F5fIh=!TZq`I7}mms%CE)aEmG;-ET6^wSODqS74!JJmoR{Oap|#SD1XC zpB_+sSdH8ieh-up5&)wBO)?6^mO#8;fYZMu(^)5?lT6ZI1n090CRh7_VNZp?$jFF8EJEaxJ`M^Qgx{Hm&=6buK5ETEHIu2h%QnY;E~iu6+_n8?@a>7)1CDc;9H4&tQL!BfW-?d2E>iW zl4#1T_tItr7&TXiJ~C!_#`zdOgG1!|7NJxh&T4OHhDocQg2J0|zN8$8m#FB_7!zVMlnn?<5YCnVdkMQxDvLoKwL)$Vr-n&1#k0R_4^t z19#@zH%%t**%%q{t04nmB-@-%lNJq{K`5mEkG;1Ft7}=hKsN*uLIi>(xO)h}-Q9yb z1b26Lm*6hJ9fG^NySqCixN|#4lbrqCr~7ap{?AehUzBY1_azzOBo=^i)@eY|}Z})T{z#v=*EqdvH%E*C~Y}8X&EIvtGAY9y9Am zWbFRt`p@07of8fWp8vE{m(CyY2FGj!pUqYp#bVj8eFtNWwo{h2Lkn~c;Gr=IDK0pT61h+{M|Sp@Zs>2@~Ssis&W3{%9kr5d((lDP#1?U zncV&w3O0!|1|h;gn>`OVp&-#k;|#XPs&$qvnzq$Tmj9Z0a{ntTdviEq*cbsRZ;(zV zA=7Mx@9!Hoj1!Vivs|aiRoM-m0%b&HRP?PDjUWXK0WpNw`e1pHpGrh(1Mf5mjVnAza^_P7%mIRs-^R2Z@h&3X6A~hZP*3?EFZbT# zT^9W5xv~>m+_i@0tvB7zN&e$jF2Swb=3O_uwhH7BWQBUkLoD$k9_n{P?BV_M&`o8M z7@y{fIEEa|iPQU>_o}5JPWR-liyx;q6{7ChZyW#M3DjBy_l3s~8YmcoBzFtARR(&< z=gpY!es_{MD;)4*fom8=Urj=tE3|z*Berag`4Kzeow_O(#4#1tIlOf55{8M8EjsDhRyb zD_l5KPavLo*jN0u$IKb=$Nh%c1E&(87ubnJ>>l$5&I8HK{vG>o&ngTK#3CmU)MA+! 
z|F}l>!g`zg@8<1}&>bnFxxs(}8s@7`2M-Q37Q8RaZ;ub@BXF{iO%Sx|?odtO%uF9ky2#5t{N-K?~K|O{;@Q-D)=KH>A^67dzvrT6JR0Ep|0tp#$ zZ`+9SI1Q;nh0@oU%yFd(!mrS183A1tSw7!8TL8}HqL_))I)PZ<%5?gPsv^fA&w2;TWv)=urgOJh;;8-A7)&m@?;aO&+IGI$YV0u^L3_7S` z@jD}@2H8vQQ>fN>1B9oNU$S(-B)jKr%!Yt5ehPqRfu*%Z)*163Q+s@Zc!w zKQ_@C?k5h&P5^=O_J?^ASMiOlzR5x;%L}$(`_xXXLFt4OF6*pQ4~&OzPqk zlxv&!^QulWP^z-k6Z(P{H$Z>;QKK)GL=vy<16z_!kIXwG>gccn$M3>1?+uuB@3;_j znA%#UUjDwgoC8o1@|Yy*P0qkiZ2GB#a(sfmIT1h#DktQEjLq(B53nHHqXmO#Pr>(} zi1=UxpM}%-TYa;9`BOuVSSd<`tK?Oucc3s^01kezrs>Y>mm0T^K+E@2#Ms3Jp#CLB zprM3gI~(b^Xf+zV5MCT{GIZLfKs5=)eEa;5HFMy_amWhWn=AnOu+wljr`wA|;91Gk zYb`gQyk_KlZXMRMnbvsIxuX>-%yobLzHE{^qgd5ahJg#iS!q zC{1j|dE@OiJlyO9*lkpbdb6iD3k0@5s?nD`-!K|WoM<*oG5&cA0fe7EOZ5QNZvhU> z4WoL#45`DMeC{VMQ=H;bp~VrT{#s7XP%JVny4QoBV=0ug$9Y~eb*NCI$n?vO>CC3& zKUV}4jaR;YKIwhuzBitYtuyN-#*zaumpMr-mhz28if!mcG^5`#j-vSCf6^D&x_6 zn_BOkeACjSbHJTH0OU16db|z`_na$SY_n3s`c9H7cyP_@?MeHw59LXJ1hayS9*|+&TLbZ@9f%S@TGCDzM`aw>1jOk+(fKM+M& z-|V&WC_w^)Susfw2#Kh3Koib-fWSmgp-_PEr5sUG?BWRYAqP@y@z@G4{^ceYF^BuT zF`y{81Ns{>HtU^FMRMO_@q}{gSdoC7mPt(0#6e64#5JzpTc6Ei$a7bMW}-fM(mBkh zvLY6=lnvjx=S{XJkwlw8213HBA6iX{ALwL_KQXD@Gp)Qnxt{q&@ZcL|XyY^(^9Udc z6DEz&e%P)x<&Ta^v@;+ZLU;uck(AUCv(9Ee+q?m`sQ{|72RonGv$AIv~lA!XNC1f68Fb1#|{AlK$68n7SJEg^#MKmZfh`6jQa&dg`#h^ z$<~B7+4nixeVvBuzENidlWUZ)y3rDK4xx8*$ufimvIV;D1?N6J+xV&>z6qE9?iCT6 z+k>;%i^D#j5OjvVao$+k;V5^_KHjSWru;Q7=krPV;*&l%AbwVjACew)O3ooFQcAxE zx^lq|3(0eorEM#qEzr3@fTfcnH5$*6;8+i);YRlVs6K=L9#x1chfZ%!QQpv>U(ZUE ziacRwVuSI;d&RO2hy1oJhzHQ7J1cosgg&jv>U>F&?o)!lS{R@jRN!iz^HeTODE?%5 zBf>bFgmI-13t!`@|CDg5a(ivbC+MJF!scOd+@D46zo^R6B|Zg&e`)*}8qj}oiVYBO zf?9^eeX)@}q&Z1=W^R0~73EyYYR^^CfZ}M93kWC{(|Dc@ZUXNna?T5V)_Ds$ZBGcp zx=_R#2?3{3DaTid@Qg^G06xI6a&*E_U>H5_8cd|1g z1zx$Twbd}AK$=NN{Dv;P;7!rd=tHa0CT@{lUlgg^-4DO~9XlW_5pRxGIXsAjKD$D_ zOy_21W}9gmF89Spl~kTju$aZnI4svwWlDNd4Vf;qvuTMI`yQ|8JII}ws{cMfGu!~o zWPX85-e!E(w3q>Mm87oAA|cxl&oeSA@7qX?p7fqC zN+QXFXAHXjeNYrkbhw&URv4J5EYp@&O&_?WV>wt!hTvAPAL4nvoA>TOLWC}?2!AaQ 
z&kr2P_=f_?1)pb_0%BJbo|is468^iqK1l)0teWyMIT^lv_e{tcEMKZJICW;C{T@Gv zt?;0ZoAdqiIHuc@XZ-PwOTgplo}AB+=;m8B#`W6S2<6CKeVW6VDr>uJIO!vw!WMqf zAp?vS`zHt?To6UbaZ~y)`Un`@qAB4M{1Ji?PMHRX;w-iY z_8@zPiTJY;{^verJGkY&2nyOkn%E)MQ_%vixFqo6Djh!;qB)_JY@&s>lC=`gn9$c{ zv^!FXG;pni=_Clo%cx+lB9AL57cDlx-;Ar<^X<&S$hy5eF1ad83l=Ps^nbR`s1>Mc zJ<4ZEA*s|jMtuLtrk4^4fJ}m%)>yDhRN6i81z!3FU!K24*f!5}5qvgElVR7_AonvF zrPKg#C9cAOa)VxHk!Q^~cJB|5f8zJ^{m@IciP7|F0MVN5^I&dc{Y5`#l=L|UdBg`m z9W&5#Cd{FF@XUFfA(gzjR*w_Yo?ytxpU9Qu8HJaTQl;Z!AoP#p#4YEVlC!y{Z zWr8ba-l}vS#v6qi3AR%wwq0tKv7(_^4PX`bdSkbv8bqvnesr*hC^2eecxxCg~(;S=4(3 z0c_ehImuJ>L9z$I1zA4qH3eDtS92ig7H*Xc^`zE6(u4HLEoKiEx&37Q1%O}%Ds`D@ zsEPS&bXY_JR)V*^jf&;EO@xFcGoQ(eAWgjC?ZhZmV2z}Roy0qnyMU}H&EQh~TF3eRe42Sud(4-K^=A zL7q@ya`#Ql!ZwY~angMDRj7QLP^8H-)cIkcj@V^7W50$q&v1!FLX{`1yb=NJr;Y@> zq}%5txtY?$3AcSH&Gv<vkVxG7$p7al(g}~N~HX4T}zq$8{W-T7-YZ)Th@=H z6)Crwq3lEHEiflBh362y>iD!Mh^2ImR|0Q9(sj-wPG~7E87wr&dXB8hC5!xo54|j( zMl69gfZpBH8diu^cR2P_yM;@E9SRM(T@n)!#csFmoKWGQP@XqrtA57Xcb+4_E?z-G z+sm?`?9N(UpYN1PhOR+LBr!_L+w9<42;oYeE0Sxfgc7Iaqo0eJ3heO&aww(2hyTu0 z5D! zz+$o;-%*wd6~1g4(bqUXxgYAv!meCoDF;pS){j)e6G?IlJv9Yd%yb`!mz+0ymbqvY zO8Q%XtXRx}qQpNgHxOJ7UMXA zX2ZS_L1@on7ZQ*WrOA_6C<*#l19gkbla9DjBZ+iwSgv8;R|!V4FG*Q+87*}zLqvuI zfV|IIhul(wyHcmsA8CnV1-UZOpSIe3 zUPO;vUeBc0^&xi1Oz1h7=;y0zEx&UEvED1r#9dLr3{+<8TulXL8f2z6Z_UZvN}AA9 zslMz=;?m=p&5u(55D<*&?x_)cH?P9#E5zVDh#8LfE=nb2nO)ovzd@n7FRX)3ZyYyW zI9N(((xxWWrcI`@4QlYqeDZPbKS%H%*wD3hHr8nXLe%Cr}{zSnV;3hrQ1cTd z zH+D6C{v5GX_N-_#uw&IoC0=eZ4(KF6Qctm;G@SVtEH*5cGs*bLm|?G{uTCmU>X^?q z7}2G~hus_5U20XxP4^81Lq{ff!r z{B;#?Jd049#EaHG3prR>3q6&Ue17Y&5OYbiG4f+VTB6sZjaGx$MwnzRPaCaVGPfX| zy=uNE`=sHUylepo)wX2nc;fVts)XY@IspU$j>$@BRhe7WK=4t|R0%bJ2Q#Pz4! 
zmZ)S~uT#{X05rQPRGW$SU!q3#GxVv2z0Ep(%SUOX_DcbWpUhF3Mn}nhv{SP_ba!0wl z8qKR4kkQ&HTg$npGp?l2mPPWC05jBl$rK?t% zc!%RlieeFaq|E{+UriS(uAeQMSK(!~v*T99seNcn+56_1DNHo(l@?xiMqTtZ+$-4n zRsBl8T=j))zc;+xeqFwE9*u4)T2`e-c>t|(e|O|ZHz>{9c?602iQhqCDG+Xb2%}P< z(7l4*nLfzw zkNQFQby4rxYYhDh!3zp_D_wg!T%~dZy5QnBP3d>k4aKN?0ZEJm4m-QlN&((*dSOUL z($|(>%5sXm>61rS2cn4m*S;`X{b|EMSOl=f@8Y}Yw*VPK?PZlq4$FDTEQq!Al4IN< z&FiXCy2Sy33(pzmMNJ0n?|zsQM;-878x&jM7`tuqL_vZb@}%=4x!6^R2j|J6gyZi- z#(@#kiOsWkmseQp9-J_cx*;!{OW(jiL>vEs`UC*hGvZySf`>aH{QO<2lCOKG;Gfty z)*DXPUi@!yVg`^qXg}ZucLn@fe>ct!5&(xiMjhu4eB&H0sbDQ*%@fiz+!jwJ}D!68s0zI-Qi%6e8q?m3)#J{N$}513&2hQ z(En$N1Y&AxYJ?eNQLe9_;6Hw8bb#izdB=y31(}u`@#muALTN^E_r+3{YynY2M40$z zz`^VIG(q@TA62;lr@z_Eg|TYm9Xg!w!( z9-LzcBN@fd)V?%2>Z?J>%L7ik2tx>$v_MDy>PN7uy-w?A>(Wi6KZ5Oa7{G)?kw~U< z`@kxZelwgT%M9aPe0e~?1i`lFxN<(TzdB96a!`bUY8^$PEElf}EyfxM?vHNAd-S_C z0@0txYssGzoNxwM=GiR>1dCMbQRoOHFQ2doiEzTz{-m4s(j2ZDRVk#igSWl|*p}y2 z0N3Cf224njV7010Q#}NcJZ7@s55JCn<$7|y4nUAQplt}&b4suC!Gs||hG1KA>^aXZ zkbi|ve`boE@!T1(cR#tnA|<-$*&2%s-E-lNuP~Xq6sridA*z4*NPhH@$-($-d;A53 zAdW*J?iUJgaYRtuh@t(y32^q_$VDZ~w^G~frM&pzE#l{@15>uWv&)PH{xMmEm* zErbW#iu)4+EYvqp_dT6{(!X62$Kz*WOx%+GQI15tCBo>Xm9G2q2A()FPRN9q{%N7x zt-jiJWa7zrawY|#SEFg$%P@FKa^()Ex{XK;Z`4$X1iXFZTe$+?QkwF@-}eRZKYO9g z&4rKs+m7*%1j%b*jJI4iFn~lVM8hNH_0xW{xij<3H^z=9Y$J_l&X1KVmI$pk$5kgq^Ps{%9Y7LC|`1s;1e+K0LT0eoW+Px7#Z5SjEtJvgx9f(Xm(NC?@C#VzttqjN~8uqQJ z6+YCvxh$K?P{x+e>2E&0({#oSfBriP3ARa(I%Ir&XQ1{|+hc|1t<=hbN7;F%^2>M3 zWyZ7A8D28r*6e?*0q|{R+a~NCi6r!Iui-!CHIVpWF;z6>Jwp`;I%JsUpYQqi_`^pQ6d2`=Ub1T+=9u zKXR(OKJ;dXprc_d06VYAVdIOr!%mimkBc()1lLyOjf5rHnEcufR{_N=UL_xED z7z}Ikt*qE*V`F7b%nq6>InV}lhB_DZ^u*(KNa}8=_70Xxe9ey%O|Gh6Piy0glTOG=o~3U#u0Gt(2{JVt3#)lY zxW|0~)4om)M^A$tkRa) z@|gs(KNcDb74`1L=jG5(gsNsC1v9>{JjidMmnl#Ea`i_!tFhw65^IH_>DEfWJe7Md zLM};delHdu5wL7e_wjO{Hm|G6@BTD`!!7hdu~PRSqvT+rv|M}6^9B}eDBn+tp5>3wwU;`MU#zVWJ>%P=Nmu2ZM*ksm8r`4rkO z9HYlOn2sIfGK+P7I+REL2CB0eSw5Ga)tvy|t9MdKDy}AiTl|{W{$_h^1*!!rfrgcB zDy0^&yGnQ*i5u~`mGiyM+>1!aoTQkk%e^E~yX3-CGX2yXTi)E&nQ 
zH9Y^kyK&E!5)NnrpF$O^p|2)+!jmoT=m(0K0bm;_6{}q5Pv*ak4C2H2RG~kbKxaf= zYS?*l$Wk@U{>?Pg7h{uvr-6>*`#fc6VPoXB`n!)(J#EX*elcQN2Q=}?!}A4O6?q=y zPBn@lPI%uh5)EkB2T|DB6A>GvnU$(vSu-1z>@i>O1+`oiCN8Z`G#-`ItL#|SYA!mi zyX&}79xjzju-A(tkt$V)#!SglPk7T=S7+2oWgE|Gc6tr-xMD%W&~x~q<`)enK0)k) zrc|>R%Eg<#=6Kw>o!XdSZZ`UUe`6$MTdLVPmOyWHx7truqv`N@XRuXX$c=&EIELhn z{r+Hu0aHGAC|zWko7&0QaJYd;S!Zo>F~jcb9IW{a`R#PRMYH>^SBWfk7~jV|zv3sd zWm;RaxP9*#eNunEqfHYXM}M9{?zFHwAgP4bH*Yo+|6^A=8pBXfMpyOeuLU9eV>pAQ zdgO}!dJbiKIEQTF^!z>Ln~hj?R|f!DF$l`fSY5fc&RbDEJ~(wqdc4V?M#n&yX(a#V zXtQ!__CzEWs!+uLM$-7pfMmJcO13hM-9gW8BD=>_K7}d{agO!wLB;ZRZ<3~P)}f-x z*k)TxY4(~c$tut@&q#dxR-l_+T9+2Dd;>JFdei-K6wPd!DRG-FB^P$zw}E$IxOV-N)cd zNZ-(bm4i~UW4t9CRrj3(6LU`_BDDkyO-;&9pvZVrD*DLVb7eD8^i%DT&F-Z<26c)3 z83W2iVeXlV>9TDZviSX?0aN?q$AhKD+X71V`@`5&@m>3_C3)EUSr422M&^5-S4Fb> zRawG5=5Rgbl1J^I@ilZGF*cQIHcLI6uzD6sc)z<~-|8pON@L2OJ;!iV_+BFR-AsP! zR!6;fT0Od1;VXA%g|1 z+~w-(xpQb%#@y)4YnRZs+9+$5TXgBJF(`4VaGq_hUSf0{i1ZqaXvrOz!}>K@*FPR? zA%ET1R1ERCfl6;q0kB5Yr=)q}(Uer3BrkFgFH<8YzCiE&B|9?Q*n7m28X4k*MR!-FUqujHFjh7nWZi-CMBmTghma zUKFZ?B7QJaWMbc+a1w*c@T;@L2H(kCKlTN-dD^HL9)r=# zlV4O+@WdW$YETvlr2czEngtloOlwx3QiMsaM0oD_xFyOx`_X*RGwB)Hz*xrXYwpYO zNKm83BarOqVf2ij05$g;6p~fP@}V?yvek9;^g;D9)^f0Md+TElTc&*T{t#nM0Y2s4 zMs`T+40bq%cq~S(&C5CpX;JHe)L7m3J*jRt=&cyNGNZgPYou$v0xq5i|YU#~;*W5LayqSVa(jMA+!YS^;TQ~$*F6|kY zE?3+YQ@=Fd)CF_xFjJlFywP9~_2zR+!HkvkwBE=u$MZpp)X}Wo z;q2sj!tpC}eX^@Pkt530S^kXcFIYCx3-a8SiV<>*PE)~`%gIfqhiIyEZjXDD`SS8T zU-l-AQd}N<#HSzC-L7{xdl9i+u3U}M28D=IXR7X@-(KeKp>s^d^uxY8J9&X9|9DyQ za;jkYt5(iXXizQj_VBIgMIoGU{5hOQ%T#cY0G7*u#S~AA#!9Lzxw@mPC-sMh4a@Vr zpy4f6dc%&}yL}96_>J4`V2CNhN+J0Yb7|Y zb5XND?0u+Uy|~f}W#J0)Rbe4_Ir$+P?R-0!M9vSkFTZUc5hfaP<8u?i20(>1GO=X1U( z%`|tUVVhJp>f?h`i3E;g2I;T;KE!V+67DCtScYEK7}gXqI*hS2-lWJ^D2(~x#ab>M+Xk0@rxgh$BWx{vy2Zy;%;{CP%vvfQs3$~r+l_xa*5;7Fp3V#`SJ=g z!i7sm_!<7FFzjpt>B=4b2bYbYVaWamSmwo@Ppsi~%Vp8&;)Tlf7hadyK>p@@x+TUz z8YRN1Mph!1t6=594OF6-xdm}Sl370uOZ?K3tsB@LeTA67IAJE}T||zOllY!MHb_AF6WO3E{C&<^Fy_-DYYvN=qbW;6i_V 
zErDzL%bYa9)pDhg*J?r2!wGTZ?8bMD(G3qLB86CZ)x`#beUNG*|5*LOvb>9^`Ij@X z1p-NjlwcD6@c7;w=GNk_!8)-HJqL5FEs%}2BY&8f43H%+%Sfd+Mp|e};|zsca?m>Q zCTkRI+`@7HjpkwL8DxSvrXu zcrKAe>RD*JSLrv~rn?o~vf1e6kg6xY_PWMCa(&r(CO_e(kzX0=nb8r zrYe#0zD6qg!akuA-QQwj_j=k|>GquUdM5Yr;XX&@8g`0Mdv@7P5p!c}{@B!NjP!&- z^4wwB#cCL{Ixg8IsfclWjO!^}gJWAwcQonk@$dsbjhmi_;Mh4Fvwh1eCH+r^_jNX3 zzF?siBDBTlD2yI($mv=4$Q_3==6CAIpSfK72&E2Q3tF~#|B!fDOMdI*q!R|Ki0DMiv7Wmy0%urmg8d&Ao58 zkocAdnR%rwdqpBsC*zN%DR_?A@;giCt90%d-BNfXQ(a#Ms&9SjWihKC-NDg(n5o&$ z^bc{kauuR!bqO`YV%+Kt`0yCcCi`NmS&1~eo&U{Ll~^EcA~yJ-{6%^_ z4_twn8lKM}n!U;Q+UV3CM2Y38>TOjKl#-8{eijsYiUo$1hS38bH<_qijYq=uI`F5> zmaBM5`KYO@Oca_)$;y3#-ZlOg8y6A*a8-$E2nK{W3#r)BkLl&OJO`Ux9;ZtERMqK& zbioI;XmaIMVgv zjfqdsrU>pv_0%4WFGS0|M3YLn&t{v1qiMIhN<7Uq>jy4!YFLV9f%f@?|9PRpo1H~A zC+T2~b{f@rwSylm=JE{lYCSy(E7VadX)^_L_&gAAy4X@|cQ;#Q>50TMxi^ZH6H&a7 z(p8Ozj~#`2rLwAwXA5gf6GptR+e~Biw4v6A_C7-T2^m#6pT3%oJJV^reN}5!qsmZf zxY_qZ%PSo7FztH;#!CE{O@-g>f~nJId7U#9%$esnfWU<|6*B z@PRz{C3nBigIZn3E}B*%Ntibzx6$UfcaEo|w|vn{e(vOJu5b8y$>@x=T3dyvT!W=y zSo77#AKBdQqvHcT^3T4l$}}mq-5%>v}mmE+}y7&*KI^?mBM({S&(GuFsvS%Of|buJDuP~j#B&HFY$u*uz5Wt ze9`gjq$qy}U5PzI1XKKw*7&M@mSspAc`>Uo`I6mM7^08);a)FVE@(#fUEhl=FTv8W z*zZQSYP!6rgGNXIuxspcg(N^F$4M^H zP*=E_a)@Z&VtIAPrHy5#JKXxsZ~QKYue=C-dg!+1&6-59NO{9ehM0+WXtJ6u1M!?q zV_OO+pDkyMGwWpE@(P#Ye}D>D>-2gYzaGw~yLW`1rX{8B5hBJecX7Iy*S1|XnW!oL za9FKwaliMDi0XQ18B~bukNhQH$3Gr$bptzXvUgfK+4S0zRfX9j zHuvKdnrl_{W$ot7MtdT=V%56LdQ!L1Fj2h^{5$X8sn+dxddLy$9+~q-$X@lcQ88GF znhaQ~4>Juf%OtbSTOKW-CJ7l*H#lFbPZ~|-%VmJ>wc+Ws8^vGG)`Q=TkvL5XGM>(H zWs6+AXQ`i|nieomJ*PEW~qUxgA{QA`6<`twey`K^Dw2^HeCXwLxZklBVTcnin zJTH=R{qXc+B9OJDYl)OE{V0=l>Y$3=qeyawOYDPQe5hp;cFFP3b@Sf4II4%7?N+5o zoS6bGV7tCny|-7;)Bgh>OR->!VOn57BlS*IeG;>OIq_1340?grPQDBQ(*z0Csj5Fs)^i8DLenOL{h~IoF8*m@*7;DV4V{L&Yq6SxuwQ3fWRZ0PmdS|mF?fII z?&LJJj!Xbl^=p2(+g{}9-mO@6FBZT+J-H9;C=53L^}z6xymj?uM?n%RU#|2hAMVF+ z6q&1-_Jqd5#$@~^zH&mo=u@fBZMyNC{BJIJ3T43cv zu->t$RT)hdN9Lkb{3x>H3M=_x5lM}Fa~2{G>175ZjD?sgUSBw^R-koZ{IH_TLToks 
zfdm2?L$YE5KNOaE>u%ZI7?tm~-y3cvLW{(jZ2BX7f`L&e#S_aZMgquH0aF~a;k&!D z$WR&+GRvHY-u=~>v`cIDZ*5O`0v|#Nu~k*G?+tl6L#|8cOVSo25$h zcw()&FZ+URRcbw>2eG&HB$g{vgA5p^)o~5o`gVsjEjRUDwO{-5~DwVG!9E>SK^)j>P zg+^l7nA@kMFESdZi6oI_CRULq&=sR_{e?(cBM573S5K15Jrd!>!q!UdZFtB&2wG1O z94D$T#+QMDl`qwoXne&)AybslSRP=DKTA_>sWyCE!EQvZ;{V{23d=u9A=YlI=f^mZ z1+sEHgrgy2Rj~BqE4{ou8-g9|DH+1p(h#1=aa$@SU8;Wjc4Q;wM;Wx{rD84gJ*bgQ zsrm&`5ou!TjQ7PR6-xs>N4`z~o5)9F#3g9zy39eNw;z<xo~TC<+I4B_|W8wJ-*l% zFVvhC3wWrKRctyJ^^z@%@)KIHySkR$il{|CCul>3IvN+ z5bscOY-uM%?YUC3uvkrFsiRvhnGU0&Chv*ZFFLj!z+j37`@D0_BCJK}RKPEge`HWi z-MW7*R#dX%G}y2lL#nGc9pn^;C&gTsWK#yIU1O-<*69nU1!NiZh2bPQ_E>J{_=MKF8651nZlamvs;BDa zi`7NxU#`?YZGN2E{qnVBgy$JtR9&h3+e>!Iq=_wo%6I1-H0t-?1ks~*()%ct-UaLL zvm%A8T_MiLe=P26-dPw2WrBRlIm@!}G331$m%eE`kA#tX5qO;Y*9@x6rK;vOqN{Aev81#4=WnVWpFFdf zE~eU&DVeFr!%jT2Af}a0zm{Y&LPD0r_%HVd z59i^+3tRg5mS%$ZhyA-wMR{(XxXQHVEV>)<#p1;em3`EOwF3FB3ZA=p6SgA-IjE$yD+sX2cYP$1qo|i!z!_R(_&9^CHS9-)1gqls#{1h%c7D8;WaV>x`JXjI9zij+PqRcLZv*$6dV)w(Eb zF`}%u^N{owk6u9B!0o=Ek;uZdYj~iv6A$$~qXTE?kyZ>#8B}`KQ_rE+_&em-%n*#^ zyxq9?u|g%(h11FhE|2k{p1c%1QXdQZl~v>SJWq2fO;Yn_Uera)t0rDgzU7iz9FlN4 zJRIObduknojgdmX*@^S+@U@lbMX)ZV_+_Of28A`yC&E!6FRCBNaC!kE7 zhTAE0j-gbByR>dvWz)s2%(p~rMf^^eHJm|;ee${X<2LsdP6fNZathAp*Tm>kB@DM) zp|1KhM5l9%*#L8UHc>2{ZLD}5@&4{GRtV8kS!?GbdRbh>{$-!*qjIHXR$ZDiMV7;D z`!Hfz$iBpjt%7O<$B9Vaki;urQ=9V+C$z&FJP0R+@~!N&@XN(KF)O8N14?0uFPK`& z(;*5Mmr~ol9T!qpyq761#O$Y;P@yGFS7&j$uTq>ZviSY5h{SlGa$H}vZ>L~ht`aZD zqg&j>Z(}*m!2FQJMtZx|=lX<9KCjNEKPF2ArTKnKMn^pL$L=V_mlw+_aRt)qLbvzw z-RiG=AQw?{r3gogd2F+6jGhEN!aN|4EyF^q($_A#p?@;RaF8@G(tTdI^y(YIdfz75 zKpm5v-aY z43_MgX9v2(_KYY*`_~h&92M?UQSM3sCt=p~@WUt8dP3x~JH7e#yl#GC5{nA(=hcQ| z$FkFEUw86_B@#Z~cT@=oVT@X_?d~$oeR zxIqV?pq$j#;@3&t)rx+-d6F4jJ>mG9SjX?fICh_qWmPf>VTXA=nk``{5x6aiv8(gO zzJIMUi+}s*Hs3^A!Fwz1iQbyf-Z3Xr82He*NGSXHCRnrtDZR*?|Dytm#|u~aPip(Q|m zn!;i_%HecABUco1MkvYS=yyR9?$tu5I#dDOC6-WTv@One`E6CwCQo##V$v8xOJuGC z*`=6CFdXXw;{yj$KydYV`TU!HFY8?4qFZFAU$S0$21%G=s_^SDY*xy*6BNqisoE1Y-faa? 
zcp7>LrZ_pW=us?Eim0`XAPzTt7G}Qz{OI<3R5v9_OGt>z^&$=d;;Q}!tFzTxb~m51 z>2`YX7XiIz9WYybS4qnjMi=7BQ=v;u#d>;)(Ge$uo{jW2Wt|Ao3U%6i-Bt3ng`{hF zgA=f^W+)B0o$m=nju>3WFuUoRWqeAS9XrBWzf>`9eN&_0z|x+(jlT&q3T4RA-N#`r+IeQy%r?*NNKf2b754IxDqJy>&Bv~@8bU?o=sw?O zy(m7vc-=?HgQF~+f#kG`g~qITo&qj!=C+(>(^Ts<&y3R0kj-89g79SEHKHBB)|TV1 zeJ=WJ6kh39Eu8i}@Sl(0#O574Pi$pInn0L1o@_l!O&Yystqmig!PnaNJ)ey%ZJJhD zHfwpDjiATH_V&X@ciVP>V@}Q==tJASK~r^E2`>dF`OLun*n76K81yQ$Wp|9}%@8Z>l*~a^US4@?D~^LVx?n@b8kt7i z3SmujEzI|bpX$)5-D ztn50N#s5Y7$IJid3 zZs7GcL%Bn3SBm&Z-x#ZjE?@jv_VeK0Bgnp&truUMA#tcTv;tzyIv}zDRdJEX)f$Xv z&jO>#jDCbR?uqJKrACv-?84PJO66H17tLCz5hhPVRb&Im#8s(M)8@y0U^JzAm1N|N zY`%8`w8*2%k9)gj6bdC?Qykz0r*=ld(ES8-=u5i&UH&7i`UVGtgaJ2O{CGZv3*T4_ z>t)kY|75RVlS3*2E zdSU`&)&Zfv9OD1;(WBv~Xi#3e^cTkTpGq}Q(g^{96IQ4#kvsN(uL|Eo4n}q0L#h9T z5BT>-0$d=?CTJkFe*QO_vj)6R@3Go9`(wZ4IN}y56S56fZv*w*|5omD=hS>&s8lKZ z6r@+jD3mLRyn(d$$?OTyjr;dF|Ct2@3>1)Yf5-Uc5C8rc#tlvtQaiddGXHtl|DNAp z;6L_H|7LE2xL|Ii*5#;wSFcJCm-Wft+(0Hgm@`t{>`UbToijuxe9C`g`2P+s!7xr-SJZL_an_eYB>G#N4z3m1;g>OgrkKeB;>!s znLuXjJ&)v0KZN}=q4ysqmGc47pUv3t;W8TKZ`b;tz?z`V->fh!B5+%6*3~=YfAe%L zfd3Cb#F_k?i;{=}=LtK$m|pTX59?nT=sy+k9*=s*tSd((`H#u4yU|PB$Vf;?$dd0# z(%n5hH8V)aa7f4k{XMOrGD##X8xm)-(v|zYIqcU&7(wJQvC6E5K=(g8topmdR0wkBhoM01S^U|h z@@G3&4^uoGSwV)$m=m(U>$Ej|V9d^WFh8T^#X1f_}I1kjo1SVo@8%of$n* zftPXqWraPqo`2k}0o8**sibjdBuCJ*`D;`Q$UV-4B_%{!aNdg7I|5AtSV0mr1fwy| zNB@zE)EKn!cQ1AH3*Y`f?7ekVmR?l7b-Jor-j) z0wUe+#rNCa`@Y||J>%^E&Oc`i$M6~6kI#MIYt1>=TytLQx>$4|^2T|WYyr6pq=3MG z&AmF}GSHzwsSZOU9qHj=p66H3)8q%1qz(nn&h^hAB{7qzha?x?u8l7=Sgm>gY&THO z=Erm)I(!3Y&BjU|9$u9PT-WI0{=yGIxa0(9|ATvgrg3bOHN~cC#(&%^9~Me!rdCq+ zp{^T_xc2b-Su9@Rmm)BNziGSLQb?^>_Ed77YgNV%m_ky2RDhS7?S%%xA7YMUQkuX?e{sd zU-;e_cRU^*#>>1~;T2)=OqUpSzmcr-x1>@6y2VQVLjW^?7o+xPzas->650~QY^WS= z2ts(*Z-i@R-fcgTTfRJhQrPd86mw(?jiSE;0`{V`5h(8cC6n4mnBMy%qlDV8|5ADO}ZW-%}HEl63)Bbr2DMoGdP<FzeQH3%34>9JP5QL{h)g_d?PWb|OM~VQV zu9TX>sQ-$BV(u6qmD!^}qlz9PDP9^hSD48xPPrR>19yMY0yh*V>N8W`Pc)5 ze8k^9={LU{#(g*jbP7SXHBF0efL@IJa4%>XxUIbX21HeCYrEk@?>Er3zA;YW$ow7P 
zBM~%n7;*YF$o&RV^X|8wbc^YdDP9`B{n{6SEttSJF7jMl%PCS!uFJA&+#z0^p6I)H zy2~-(;jZxO|SU0wryGAf$D8t%c^7EjJXd2E1v>2@vl_*78|V3U2u?52(@x5!kcZ7jHk! z*!6{D9%M*KHV+Wlg^s(`p4 zcM62gZ-D@NT{}Avvg|mU5PWsrgFjb$k~4wDQ~-E_1Qje!w}%HwT*jXNsM>n$l5*Kj zD+n4V0+RI4;gAcg@>&*Hoq;?@JPEOk(Pe#FLE1q>@tp`|GTi|kjc=ZC3B%v3_lkrV zSBQND>SWQ-PnF{s)vVd+pX#UU+usg2|Lb@WLy z`vW{>ueI25`;`$@2H?~H$^=f+()eoTQP};SYTIXflMXxVX@%C<57YkfkWgn=OKXpp zK)g7k5B@z|5QIb^fhsbPZ9NV~#4aQ#B%-;ejuV0+gn@%q#>wp>%VobWw^&QUMH~ja zoKFPmg6hGnhT8%02@A0nr6zQ&1oG77)(}qd!lt09p>7|nM)(#?!oPEwcJcg+45+PU zK-QA}8VXI@h>$#02|L$wmry2A37bHklY>z7olsFtcrhxPZfCnaVb4)4yMl!#UQZfE77Hl;d4+H_~gue;#+^|y~4`~-7#P2`4Y z7}cHDfDX(g{}zhM3W;qi<>baqw;F`u-|j8;4D)2e2Ac~OPBXjN3o!1b!?v0U0BRq? zX790GKVYF81D`?g6;2o6U2&ZS%H;_qx}VJte%Lv96Wbro0U1#7({2oK6C4#8)MM zI0yeR^o(!?@Q-EhoZHR;4YWb#51#@*gGA=7Yudr4$lW3KfjB}?={mjWjXc5ioW6qv z5efJgu}Hu<`6#&h3(cl>%G41qOqhV|OC)+F{8^cBJD|=RxAXIrd6;YN_!#4bK_&Y; za1%PrS93n`>5wnr$g^NN^1I(6cjHGGZ}+tg5tdTgy}|7AGgxf!1r7Fi1~s3;V@f2| zi^SGXp(6mZb{HAK@`p2u^BMG!dHn}83Y?_oOFuOnfetTb`lb$CV zOl0yzb)Fjmon&IHefeI(2OK!+<&PKee!Lt>#j%-m@8K!QQW^P0KQWl)#y<_j|Nb*Y z43nA5$ezE6&{GsA@_Z)9KO{kP5u>@bE9`a8=hcf3Rb;{X<;Flt#gD}=rfeZ)%Q8Jo zq~8j!NG7Hy36h4{%7=(faLOVfS}J2&N9Zvv9z}s#7{UD}a0AHwkq(#zso3l4kp(^7 z^5I3kyw(uNI468&oF}P-;3yg?_9^b|R2T`o<}|=U`xM8&bzJiB&0UYY>X2)b=*A|rRn z#nsHGFX4T4BnP@69ay21C&a5KaByK`2pzv-9CYO9VAJ0k0JS9@A9fLLmATehD?a!0 zG&S;6Iwh;2>J`)M;ZI+JZmgTO`ykL+!aziOg=Os=gpwJB{H$&20;iFukzy{~F)9av zo9$4Y@#Yp0(puZrdo-C*!`dmmk8^>J>723A+M1)5fdjAa49rUx?%0z3XVwvXZtCR{ zW%H2V;3YZi*yORH)8@0%?L+4Fp84G%cUN7s-0Ws8z?(!A;_?PzW(wuiG$FUscCVdy zXi}A_ma_IhK=O%LR~!k&(&zak?kJ42E%Qa1wq4?P6S%4Oj&Se=A078f`voWl#WcwCEw$^5%A z?b7!wXtl*+oj|v+dDSZyQb6k8`){dydPMeQJJZrGkBBme9tzZ;DAwlXnRUl(cxa}k zY?oneYZ>jn$*#DQYj)T!=Y@e)1I}Bo)wA5e0ncTy=h<<%3|oY{i1b~@NC&K zPei}%#>Gpp!(2n;evK>9F->ujVvreKjxu&-U2I*Q&Sl1)xCB?Htunf4hsR;eW-{oO zkHo=@7|XILh8mUKAYp)RDl+e%AHu^oPTrt>$v>Y!(&z7#xnipAP+e?TFFtUjecbwk zO=7bXtk|uU^MM~9@psT#vw+kf_w(r_Rx)0zQjU*O)BaO1NmL^gm!`&BgYO7Y^w}A@ 
zS}7ZAY=mD%aKt}<4(pY(Ozw`o#fUiYNb%l++tk({Nm6~fEXOVLCVz`PBmrLRYQqad z=-{PKL6^J4hJ&2bc4S_EXa+6P$3&3|b-ITWV@=x+kMxueirjBRK70MEf`O`^)MZh20o=ec!O%Gb`At+Cz!7|oV6TKqDK^PAX(5g-YE zE|>?Tywq;!NuackO=^%Mu;N@npzecPxS-gL>ljE4SQtN|`|fBg%D*Lcxpg&4WR-5! z0#ZN;C}q769UN z>;pqImw*W}F-EMfIQ>wK5bHm7hyi5Os;^7CzNf`R5@vI)_KNWvp!d!;zpurM!elFh zc|1_@>scE#p4uuGv83HVyv0D<@p-8E;(&`=_uVPbBD5cEt|9 zHS)UNM}mda`F?NXCx^Pt5mT(h7!|IDD}8l8|IPmcq$J8EPc~q3x18=eMt9o@akCK$ zc~i7;zBP}yR&U39xhzbWAAFd1>G=KA5%YtYE(q-7V%JR==Z31 zatb<3M1r<6Q%%l~@#KcXJ|fE{64xMo6#TLt%JC^np!PHIa~57+>e`)vE^8CjU#3i^ zw`d(^BRGpy5GUPm0ftPew52tuWqqg`&(Qsipt(S;!LcjScYHLrM`#-)8Ml0$H_G3F z*5e9;S2*w}ZYf@P>KQtvts?99(bbP2a@vp2_ib%Z+cKj*YdEgVDE@8f&Oj$L{hC6P z{LXhCrVlD4OTjWJDemu``8i4eoh6cxsq8vmu<`q97&Am94Fj~t^|l@yVT3q#Mg-Z% z=&M6$WK`@JDE6IUtpus=e{HO+pvHAV93P>QJfolWI5^3MNGXqcN@ueN4+IcW(3=Kp z^jS+vomUyo^ljhnd1cxYgdh;JT{ki@o=T&$EGVu00{iJ~{lm4bLqs2T$_=((Q#gwv znalk8S2QoPy^i*bU4ohJA_{{v7uGiPg?ex}vb#svyi=_RN)=bWz!EY4@W8-|ZR&}p zVnK!+!-5eg!1A@Sd{sIB-ZJt23_;fXmzgp&vWmz_C}bIV*@z{ZrodC4+?zPU`tJmb z0!mm0a@5iH_$F`34pv6OhBH0c2ii_bkz&GDq7 zn!IIQFyI_W6ikC=6cn{npss*S2tU;n7fhyF$kZF18#U&s`Dx9a&HMz@vOGp=XXs?_ z)=FGor0T5km|j`_mtPO}uZ60!S`(<2&Zh7hyfLUTduDSN?QsMx19kbzo@ec*^QOHb zup(YjTIay7RgS}Ga4{#%p$~RKwWqsRgM4jLozPzzTJC=k=epRP<|?k4jKux`Q&Gr{ z`d~OIz2a0V!z-kRjwAntqVHCbT$3>=)g;-5p>{s9AN?!wD9${LlLUK;9HCm|FM%va zRw{kRo4+eOVlS~zHtfG<1fs4mezvonlrUZu--aN7E4Yt>Dsm6R*~3hbQRs6$2G^Zt^11X&L;4f@g~vBSv9c{UOQL#Q_Y+;X)4R<0 zlkRLr5%aUfEi(3!UYGtt91RRbc9HF%*#LzJnqHq?hDDyx-J#B@ zQm)45daok?s;=jw!DMDJvcG6jA!LVZnxv4j*k}_S&_lr_+~69meM>j9S=(b>J~mu2 z-9VehTS3`{`_ce&pHB9=crVy6%2*;^pgt z>s!Pu>#lB;_!}QBHU|F7Wfv9iNG{c)l^~W{Z{`Tjd3~`=wHFoJjHR7#FRAoTRi27`c#G{`ML}@%)2~9yGy2W7sx;7X%fl(%Ra2ggo!#mffI$ z&^YvrplkZyhitFfAC~t-tnAk`uw`&tGDznO8B-U1F1xQRuu+oYv0w0a_cXw-VaJ#< zbea#I>ssYV?>=ikpJ6{eEpvU!o6r%s8^l9xf3+}n%mf}m*iQtttp_4MkM!vw=e17h#{{v>+~aW-%^jP$Z1! 
zKng4nr+3yNqbFujqmlkje`$2wv}C`7PPZ!kwjX~>fFbyVAT*y>A}@qDfXjkkd@kCb z;q#9ktfFzaB4%mE&}xjZUG0xn@{=@$A?BI(Xv2d9k#R5qQO7p}#15|G6X3^|4Cpgi z(|rDs#Wp<|pX2u6lU~w@zcUAu<@_+FElPQ<#_p0&Q*qyQMx8J;Q%Cja47C|+$BV9S zOaiCS zSD05)uevnd@_a8j$eS`w&OY;BqYYJ*J+ z!ccC$Ee=a660^K^+}jg@aCkD0E*v}*1-oiC6vxM#y@q8Q5tgaYW}?;=&VIXjVv$0K zr@Dn?eg-OMDi=2zi~CuD>aItNr|%Z=+7KK1OLGj%7@oP>7S{+gsP<6Z3f*i5)tZ)F zBDxH>;rlrf?hsjWU+y<%FDkgne@DTYT&=h_D@Jqnj5GSQ_gj8jws>D4I^!0`mnO|| z*P^HLTcs2kZ=UE6^dwTI;q0>EKPmn8dV&<0?$-uIZ>RQg0Fn_kr4P=c;tM-!#(EHY_X? zWS+I{r0RQMutF$XdA|QvLV8Zo-A%zwvgTShpITc}MDR!^wWK zn_N?$Ax&7rf$#5YjJSC%(R#$9jOQ^MtiL};s8UQ9VK29girFhJ2$DJ~7J%!b$>hJa zBd{xh@a4TLPiBV23oxQG=9;@Km+*>E)J%n3C8+b<12PllYnF5Nv`{-~%H{YUNg?~7CVpoM)< zeXIHUFX#H_2L(hqT=hs63}t@>s9~yz(5OLh>>jNkKL?&R?t`HE}sNyN0_FdIfUAC;3n4PNZ8t?|Kbn7 zweR<@uLl7|87xzvi12^;^Dn}9c17ZmzWcSebW$xfQZVHh06 zd!|r5XrTV81OD|v_1V83D^3e^e}Ik@I30ffYp?|AHH6I+1qLLNff%%5?+MahU;m#E z>oB1xx*X!O@*!waXqfvj0RWq$fICBQ2x>QQ=yty-)3q^DeKJZ664HpxsIHsSg<&1r8%TKF zfk@t;M$LZBI|4c&Y71|A;(o2(-<*NfM)y$su$NurNKh(px+P+DR_(x*!?*$Ef%PSRlld%3rru6?DhyQ8WialM`r91tjyX@-% zTROM>T50^ZWB%-8!uWLo;lsWjJDs2084AhU57b z7ySKF>t)CoT6SiNmKW#|u`;T6V3m|Ulkg0)xxAFe%gupFb1)ne)?Qki6 ze;i39@KhBLisFBKsMZ`XkSj((Bb|R7Nm%eyn*JsEe|)Hw7hoXmYs7b6{c$9rz*8}G zssBGsFMHJza{kjOH_92`oQmI1I-IQI{{BJ#-_zZ$4DPb};Z@2D7$WR=xMB|KkiSxa z|M{>_3AIQuE16PJ7%uh}YIKZZ+Yepj>bqaZZ8I-S>if_)8u%k}%)k(Z_7<_ZnypBV5a13uco{v zfm#-gxRkfe9=GJ4n}R~#9zw3!Mn#O%;EqWhju<>LAH>nlfHk~%S8k1z@v_Gt!rVc|R4qXY*q~xi#I1~qxYy1wYLZ@M5=KE6xrQ{W_-BMUF z8NjczK~VqvKuJM(oLQ+ZgNz^-irnSe-u#BQ9jOFK-l`;?YKj_W-jE~MED#P|0{wJb zBZ}O|y8Fv(WYv%=-q-W<96(o=$bOE>o_B`2|M<}~|L_$^aROnHYmvmEIeo4jH7Va$ z)kmg`r9riJ<}-=MVaL;4dBCFkZ_0vdNyDDW_A$6dE6!K*JZ&%*arl4HC8Ar=@mJjv*_yLTXjW+cE9<^Wy8FH>%qw}dR zH*vyE);IYDifL%?&g-PnUl_8x%waK89=(pk`A8obkoj#o?*89g0DjYj(&eJ_6hRya z7CqgxZd8>ux#b%H>Im|uVigCgtYzk(+<&;`Co6F-zvrpWtk!D)4n$jeLN(P7bEsG~ zkFoBtn?buT!p~s7A#pk4NVAvUa+X$v=Xw4s#ATPIgyUn}@cRBH)RpR78jEfqQ!qIb z6I1_VA?ik0Lio!hL9ggpUAOl?I066c8W0k8$t`f+)TNMOq)w 
z31UKG^uyfef@1zk#B4c@jK`Ao+_!%&eW4)@h4i`4!A!pg8X>3cZdLzm=`#{O%l%|j zkJq7`HM9SQ9} zt9laO0kGJU5@nWcy}ayul?2KBDDzK8r*krp>-)C=ACVQ^_noW%M`5O&nRFv<=f#V9@-(fNK+)L^VGyXUDR-8Y!y z!Qr>QMuiGLMd00Ec^`dzLUx`Z8=XLDQdWPd`I<%wXHSID04e@e;zRgCBc)q#C9FMd z3*@!!v16Nh%oTMb-de(Py?Z(T3iEId*$3A`JUvX5V1PmR{Rkj974@O-MMx7JG>&o$(H0QFLr%mB;WMA~|F93LHlGaILG{XOC ztBENcUt~<1#yT+NP#J$Yfu{hG;iv;)iA+nWVP8^n97@?MR9sxhPBldd<#r9&VHL^! zoHvT%o@MfQAJp$i!n5wxA|A7lH~RGNM3<$R5Y!*HQUfazG5z|8v#QrFWnwvE15YdM zwSHQD$kvLkO_rXtPGN~6S&m!E>^A(WH2`d8wD!tMyT(EpiDH|Zu^JAV`sv5av46f% zTUbhLwxuy-`uB_Dpfqyvt+-*82+-w;|Q5N)uBNUi-{%2F# zhPLcUp)-`Fx@(R;a-KI|u77P2URjYN22tkRKmX!_eFV(l(iUzk%ZWuH5JgHZ8PrasBgf(GpUNLzgX0b1Upc~7cMQ~k97JgFK zwjY4}dNr*J4j$2J+BXjGwu8wN(xJ&KL&^8$ z(ol51U+dIX4r`(Q2HVzYs56`+GRs0%It3FTv?l+ah3H;Bi4J!Meo;l}7mS~LoshX2 z%5fY*%Mz)S{>&nFwEAQ^2lNC)S@*L<;kYS_toSE*lEjZ@CIO+`;!9@K_cF_YK zERmES9~x-DJ~V3}xH)=JJlmKUFCaS&{Uw6G5CnELEysyRuTywCzuYxI$q~&-q-RjM z({>}sDz^l^XIDh<?2R0-Pt0(J~zQAf~b$kDsqX_%=HoYieKe^HE=+IsN z6odOEQRtP)cYyvp!0FMCIDY=kw#<7TPd|ucT!QMnG>F6Q8lYqXZb}AEo-jy2>31v9 zMW%VdlK6Io7%<)jW4}NUG&|uk z_5g|N006c3>S&nd(2$nn$oQ!;-yRHeDA1Qo>*E#(^A=N-3xF|nr>saLCFuv8ZSIoei((JX9Nc| zpBew23KIzy-GV04cUbDYsCH;S33=9+SMMm){nT8ZnoAB%j`i<^IU#2N5|xQHeh)v6 zgMtbM0IrfyF>~~3>_82ZDz-eb!pjT8JN`Jd@d-Bkxv`V{@g&Tt?Y!h!iM%Ml>DN~a zyJJ=Y-QAq>9{9EUT|Nz?H7=#<_9HuoqT1ksq{242s<1LF6~QC!$?Cf_!Nt2+0CVW+xMlPz2P?~P~X4py!==b z;;cv`*Rv5uZa-L(gjDyi%h%cM2MR2nKfTwyzo^MRDS;bw94I?bL8q2KyhUu(LvX-t znS~9?<|Z*P^!e3~Z{QeECf%}nJ;3KYEbt3RON*jW$@^#tCz3g;z5$P5Vy3@JgkAb3 zDw$0)B4t!54u9OuaDf^n@pWE99G_1aQAbMOus{{Qp(|z}(ZD1EB!^`sp%)s7dyWOa z+NuP{C0+!Pd0V$?KsSMXs{@frsEl;=F}%o0fF!zbqcq18)_Q+a*`p^LLSU(EDvuMU zaL4_#A$g%KC_DkwD@@Hm`_<}?!cRb4Guw4W{@{n($La+GBAkX8VOxE@pm(YvQw^)Y z%8hmkg7p219yX0vr`DFOedB+ET!I)=y!P?3xsy_V!C1=qux?NU)T?+;C*VFEH7!X9 zBRKQ}#90Pv>W^N|{F@{~7gbd9`*rxAzzx{%0ml1DC@@UKIZDhAv$mZzJEGNNIG>OU zPbH`6BtaH0mh3a0X1-tcEtwA(GA^Y3jI2C=XzWF-Im#^%u zrwW+u-L12fO47pXa9Y0@q>KK(nS2uQZk7BYK^0RbEO8bX)QxeA=@;c<7A4g1^PRn! 
z^d0sKuOc}nLoCs{>8n{Ql?=|1iCjcth%Y#whlJT#^z*#{&@!FIuRkvG(NZk+v#&ZDfJ@wR6;OrZ?u+{I5I4YszEp0xHu z3%4jpHj(JcqIi~v1n#r3H~^xg%qx$Se1I+S*TIQ1)I)3FnJ`4#5V_e-YEXFq4sH2C zaq{C=ZT%2;HS_``L|?}wX6GmgdL$B9>vAGynl7W>Y@WZ%JJS>toHf&=jgfk?L&hUHMU#KIWvUy z07a6EDqZKV@c%MIIy+az%Hj>y&>@r`MIjvkKI;LFGRzwXp&kQ4dsXxvW1?=aUgFjD z>Br9BSHEkIU^se?a#8Bl{MycPe!7xt^V&*J_&T0I2>~4c20ST-xc0!1Izd%m4ytEA z?#o~=4DOI;p2Ldalxl&BGc&B1)`E~IIkIs?t&d*Rbg|x+_w#_lzrnSd2fw=I7V3B2 zDC(RcugP*%1a_{WpW$ly?+&7k^ZKI}1;2>>E(vUuRURWBOX^%&8PQ7-ti+1&yDbX# zA~6z_APK<)cBKfxyJN1m=)}xMo~hIPCex%0tJ#}Vg#pTAAYTDX>fJ+XmTC&a~%>G%nA=^KEo@I;Iz-0+E% z=wHYWPDS#F7ZfHjE@>MRvLdIL$vhhxBDeI8G`M3h3~B_Q`c^L8i;E8tR7_)%&G}nl z$6DpEFtxg-r&Kus+Ih-6gii{7Mh{AOUX8d)LY>xCX(4j1f?M;+Nl6w z?sWnK$pRnMPL2ec;;KSZZ|%H1{%hUMe)GWf1+$6h7VT^8+vYMBpb`ciC4#f-4Ol>u|6 z$JAqtn)`D&X5fC^#`Vh2cle8=p7$o{3ige0j{>x0=5<8K3^b=5cgMY&eM_4PPE553 zjcHvYNwnHh!`z-9hkJpo)ejF)r?Fa5qYTPd@l(-c$5x|^O;e6dU4J@0O(+VDB^ka_ zeaxbctBet-I>)M8+|ol?L`}7;UNJTZ2`Sha@nJ{&6|b{#y2-UA?Kw%thZmwZDSG5k z98O%eov`BV-$dh+3^!4-mcrs+qs`w3KaExo3fiY_MzXz!|v^&eR--h$(OdIXC` zO>pV2dx4wn>s!5^1idtUrcI5!TbxI2%yf^Fv!S}20&9cKVbh!&jW`C~TE~J~donq* ziIcz>RuFpqbXe=+rC!J-($TE(i|%E+jFB&Cy6I+q-gq^pex2}KI2_h;Z9MALz3H-y zU~~XvJ3h&8unuL-B%mq`HnHfJm}e{FH-RIFq3D}3dlLijZ{RI3viuL(7Rficonm@q z({emywef^I57Uk4=H!wm3rD>aPJznpu`)~~Kb*-hz^!labH%$0g-!%8Tm0=K7jIMgRo#Nt}|)a2QFNJB)fBj8?s!NHb} z+-=%v+fpJXc$E;-ge)UJ{oU<-E@$>Ck*tuS9*xYmBhoojGmE8e7W6j8CLsPs|Cy*a znzln@CkX*>)zEO_M>Gv}uYY07BLJl-IG;~2e6%yOO!u1nY{T4pl+s*I)=-GIm!Xrn&b z!0@|vS%%7qde*vo0ex92+e-@Oxs*TR=@6#mb*JPnK72c{?#2d8z`;TuL0f5^8YMCO zH0B=}48u%8^nq={_taD z?3a%olyK9;4UAmSoHs`-LdQ|pDUII*NNSZM)vRl-laIgP!;T70#h#JukV}#^=Ogk} z*Y2f4r-#R9?tD?#v-Lg!=M5~TMNlFZ`!(5VVh(tc)Duf@EK^)(PU*!K47;7-v17<4MKFjXcduiwP2|NrN)x6eHCdeLj7^w6 zdAjq91=CSUrRgJ22eqW7z<`jKq8whA7|etT9q@)tTi|v86 zsRnxy92i5gI#qS3Yda~!9Xz5<= zWO5)t)`m#phEaGKldPs;&)n;Jpg?o)U6Ps#y=WjqvsbFmiqckz=<1?WJ)kue3mQm@ zjlq4(`!G|1v+(MZ9px4!xwm+VAq~ll57@_kl$+1O*)&P$)>dYH;mX}~xnzo^bntp@ 
z0jD?aMJ$eD>vJOVo+2sBkbvE~@&G#yMIkc#naAmoSG+I2%|i2cAotu(SeQTQZp!=c1->4Kx6!log>LJlFNR7YL#x&>dBkY|+UVm@ifG5> zqRp&JMqto_LoyXPF4Ftn&k=`03)Imhq+YUxnunb2GV>KgANaNhEJ03Wj%R4#DJwE< zo9Uc{WO`o<4jo-1@|awN^!F*`9nnNH; z=}0aUeT8aI8Z4rs-|8>z6y}eZ-+_8{lJfx@*7Apqxip0 zd--VRz?O@x4?KT^e=gdd^i*L#DNx;tQKZ0wUG*G(~tiB$ysIK{9U1NYD}>R`@fOug{SF|zkEIQD5fx*~7( zwZwEk5yyl^ZC;g8kbKXL?AcN|3j3rW!P|x#n{`mubFv+_5Hbwiv_LQtYKX2VmV0(m z;p!f?ny`$bS(&g*KsC9jz?l;G7QTWwEO{f=Bg>6(qjac8CpHel0X6hJ*j1lTNo< z!S38D+DKWT^1&M-cI;&V2xqiSZg zgQFv{O_Q6*%8ue*3m~=H+{&i}fX^Jxo4uKjgPcwRy7uE+BIhqj5KmQCvupaJG!k~! z);84?BdQn5XcrVH2S%Vyuw?N3dF(0=)S-E|M0e-x3+mnkY)`_9-9dscDGk$Zb#UR>Qy-CT0AgKqu?3I|H;&yL6PXUA}fVfG)F_{K{(!w zqP|r2A=FSPA2>oO!K15IEA`v8J6~@W%yEUg1iLeKWto+*#iutmwzgo7T?-T{7(cb2 zrqsA{DHSvLM7jyK*&gR0ndM8O5PPCv9bnqBi@n%aWpOO?9%fg(aQdbZnw82I9k`H3 zpzV(Nz5J`4;g~bFv!eNN;Byp7{4L>r8>6#KhRJD1Dj)V~!^uqAGvBq|(7qY{AiZ*l zHqXKo^BGfGaqaZ8d3(qEg>QRi%24Z;ioD~5_Mt6}?)Ot7js|(;(L}>kl!OVh3@$*0 zxEmwmt6UTOk_EP3Z}%@y7yby&?R_@wcTeHNtR}+@n7|zBz|T=+rrlL|re^Iou_q@$ zGnAMwj-W-EjJf4T{_a;BBUU3iJTg?ikGbsh|`qOI~6 z9O-7gTxkrjw#HUw3P}BidOk#wr8+PJE9|}1FbJY}y)4u3{rL|L$-VX|Nw#OvtSFOw zLVMvMbJ@aDr<;miP|?hJ9=@&D&pyYoFS0m*Lq@je$S%q1`b5Nols5KKE&n=_!JI$0 zm#%KVOlR^c*wN#6U{(z8Ms?K^@yQ|U_iy-a{KP3_VF&H7W+>Y{6ZWJ?xFTeg(h|R2 z-RV~oY`<$?o)Z-mFUUFjh9|vRp~=9uyH<<1=NC?P7_8k4gg6t|y>~$lEQ`W!(xc1v zpdxroky=%KnP^EZ9x0Y^;hu+uc+P$-haY|3og!FTY5h=LOm03K^Y)?bn@+-iQ-qsbN{jp(L}`bXbx3A9JLs|mAQTvz5+*dqM&$i4)7~Pb38(>=!}fk zr$ODxOvCBUOHXvmU~#S>Hehtz(Cx$p4m8_kssK73(;C%haL)B)I~mx0r2 zBKQ_+CfDQd_onr(w(jnKSzWAyQ|;Ty&M!V};vZR@!7~f+CkB0(ATz;w40-0$O?Q7@ zpPr=^$;4OdUf39n@>4FhfOwYXk+U8qCCaXu66C!xW2r zE@oFFlgmmDy-BLQTh8^mc0FZHJWJ1nSs=2kQQw&02Ew>BalslpM)+3L-2IZ57|SED z4wc=+BpAgEq;#{S5A2AMJhWb)FAUTQSDY!9Pg=G^Bla}wANj_8q-(?^vptrON!-SH zMG^LhI~nz?BRw?t>xA`Br)1QpGtT@0=V{@1q6Vqvi(~K#3m%j7Z+(x&8?(zJEg&Ck zJe~{3P>UYVyqs1|W?AwY7$G&^HT>z77PsxOqpMY#^rMdeISZjahO7?`l0l@WjbmsF z?>knYXBh#UbxoEu@5JkRnYjB}e25$zyl)!=GEHpi)dUK``ep>E`9h{QD{!A1mkv$L 
zZ6gDN40-1cU83~N_Q$?3FH1z*hZF{+nSA(4O$vFBI1Df1d#=RQ0nYNN8}Siz+i3+o zGB5U~M}@#;llSx%tf)cMk~{Tbx^nMM;`&q z{7CGiBIU_spk}jgJp~zYMXaaTipydNvPiZo>P6Zz|68n0Vi=oBna_JO26Abt2D-PLf8o?ymv@8 zAWN98VSX3izYsWM@flfZQ+(iZ@RT;4I)GcAiQ2=BlC{03iZPHhFL26KAwtw z3Zb}D%3dFtYLzFstZLt@(|Y-^7}0WKthd3^)ROH-+`PU4?kC|8Cr+23lLbfZv3Rz& zuaQ~u{H%zm_!`uW==UYA+mP^4H1|7ftZ#~`UeorUT2(w-vy_u_%K>$`MWa|1dNb~Z zRaY*ke7iwM4g)e1PWdxM-TA~W{{ie0MAQokk_!i4CRm}fF|ML`k**!5^F_-~0c5svESV$T~wZz!XU z9w_=u?^ZD7PMeb_KdrzD@hmh~_>k_p9IA%|j==huXvA@oJ2JcBo@DN21X(diCZf_q zJRSLvk8Dpb6lF+3Meij97wrHdS^m+}H2;;`7dV;YhQM~o;poh|oH?fv;9(Iadkg_s zJ_O%r5X*AG@moKa6P&UV(|i+edr4$1tdHzfw4w55v_IfarR#7FIj@W%<3G!h$uP5| z2UE6;qQqn*j>GuusT0UN9R{S1f5iF~^Tw#zQB@kn)<^LEq?eR#H+&9u28#XHt`7Ww ziM3|NW$mRmXjk5SCMPZ3;Yb#|U!f;>CL+I%Ez(0tN5GQaL~E0U7izn231^?=n8_pK z5Pjwm?1rN9xET@j? zD_^!VX%?wcYvO%}KA*NVjpPhHApgO%aqzADw~HhdwCy_+_vn5I=GaUmeA<(;2858o z&(Iyz15*oVe077=-g>7FpmlV$>g`~P))`|Jm+!ijJnkrDwX^f#LJE)Np7>I^!0*Fn zkZYk`+QU7&v#OW)&n>tCf z6*}ilV?yhwihT+GR|!0`IeNpVtweEPz?exGDO?ysKd2w!_~ktN!Pv`KU8nZHxO>Z} zJemf769U2A-61$P65N8jySoH;O>lRI;7)LNg1ZNIch^9$ZQl9s%g@ny?gzJFZ?`gWFZK4 zoSVPTu%AtxLk9Peu8+?DH5CL#1z;*j7d(rIY7Viu<@8Hk0H0(nq~^m@>*J_<9C!y` zD3o75G_H2eNZ|>CIwi>36dlnA_Z2N(e|n0QPhB?9gaHqe#FP|2a?^wIN2o|lsNe#s zg|Sua1KRZ0X?~DnPxt6~hjeJPPECrRBoZHDE3#R&Yf2{eX)!C`M|3TjVX4+UP!Af% zICh)7rT-9GdXpu%)CcueQSb_$KEDy)v6=Za|G8%xepklZmm*5Zu=c1sQODSVq@p!r zkx}=ieplppd4U(Ua7%*4?qurtYD5Z4p>091a3M5&bAniT&jcMV*f3N~Ynl%#k3mhf zw(R`&U#TvC#@(xVvR6zMM_owZ*!-(yBNvq%T=tXYW#w%S_>-yxEb$te-(Qr? 
z682ew#v5WjX5#3u>wyobt!T4wcRqh~H)2|onJLebmSi{gLr}e1i75PYMqepGQ>{dj zj(P2fY~2!lN+AaEhE*S1b1xIA0VjyFJY;@09N*r#{)hOj*S(FFS=GAnnj~72j~wNP zTsP%h{+?0*HX95%g=8-RBev$vNSn?#L9=^eb(*@lAbP=1XPp>oi6-#3J?Td*s=Mb~ z3d2wDV;O`0D0dCPf0o;uje(b=Rjna^tN>sHixrLsKtNF&sQwRBM>h2taxudLSnPF3 zod;@p;ydnq5om9ZfOw*@{vy&@T2d$<89|u(V<`$t3~iwFc^=$RaxHZ>M4B&!2g$;B zyF^`L@kU7D-lfh;pE3==~7>5JS_!k;=izTK?azdhn$eQ z^25YfEtN2qX$){lN9Dy^g{b#Kso;)CDFerrH9YYTQK<_{qS!eWQTPuiy@6a%UA>K$ zMwm3yEP5=8hz)w7*0BpBIEuZ$k#k>Ct z&=-V#PuFZ7{*QRae;~txRqyGVg|8}m{}>mbeoqv^6089h#{YrY0^c5fe$NtUMV0dY zLwQI(Pyz*_@ypl#$6t1c0d=8QrkM24QG-CBK$P~*(*IBsA~_2{UHrECTzdUqW&Cga zbK*N#4H^BOX8hm3zr_RU!m~n5mFOSJgW`t<6zKn>#(kGowuiB-`MkZz@)nB!V>^oE z_`nX)7Wh2Ux%}hl@xSLJvbWWY{;^75q=71Z-K(1YN3{EIVpp}A65vz1)AG=n1SUtf zV17q4#maCEP-{v_rB&~yvbqeQ<7dR2&u0Vu{ne#_AQ?RtA?4<1y$zz+%oOu?hf-r z4f^x=r}OOv5dJv<+^&{!)aO;!g&OOn+Oo>luR!$eeUaB=2vPYwE%jaOBr5d&s*`=g z`{o*^&JCMUM}k%v0JZ8ZrVpp{!wh$#IEZ-N?J-y~&2Mvbl~CQ*T(DifV1rCYFpiA? zR&goT8TVb%Rc|>n)CC}h!w7zL@3V;PLT=!m`?d=7@-3|zbRsb5zYWeo_f=-?APZA=IReK2G4q(41{lB$HUx+xT zS=u>wV}%t^0!77e9e@RDTcCl?haoS@^k0tMe%%oMVIF+9E9`>YJjdED0V;dvUGCdN6*SD9s0pP^4^Y6g(RDw)A!3TH-(329A>iX7)JE$ z!Wmw`$@sJF@(M5tdf%jP(Eot+9O^SqN1>G`JCUH+5Xn3GEn4X*7VktCm%P;j&m=;g zyhr?(V*aB+=bw&?Ou!hkRo6Tn@{Sn2>tM*=E*-D7`|D{HdriR=2w!iD1qgXu%XRm$ zC`99jiN;ATq*E)~06AopUCUnb_oHZF%I8}Qc9)R-Y_i=KcAmX0u}g(3XC38K_6y&X z6N9Ml8bCr#QBui_$A>|~HQYTsq2z^6NOdXHy<6ma!+Wk8?<3yur)=K$+%b`ExFF>R zNqC+lqjHr{OuRhVX$MBfA{rzh}k@ zeECAvRSE5J6`_CXM7j_0Ug>8M(IIr0I7dGd`xOzLJtN^OCs4IY3h+KB^N!6gVXyd}L^X6y9&;?|4bLYAacwl06h?}l@izpILahxBhi>@A`snO%Z4aDEcS#YIO`5kkw` z6h3L+3-GzH`>A}$4ep{{HUKJULeEIa#dl8HR9$mf+LuyupS|xf>rXrM3d#?X0j!O8 z-J!snH!oA`VV=_j7qt8UM~eP;F8;{ysc+T*quQ)U2XbjLc#j9zAYv7QC3w`k2t0Hn zuZ~55CQBL$eXBtd9*3e*82G{R)J8mtZ2iL$?6FBX6z$bo*T_XR9P` z6BPx!46c#@u#-=hGP{FkBLLGtwogl=zGsTmdJo#wKzq`7irh#=m$x<`CPS%i}A0o(RnY7)*5MI9tW- zkCsC5sCVK-9)Th?*InhDG9TpSKXCkzVt=T{y&R#dLSWS#&K#0u$ymhmnsaMI2vHKL zlBy&Ao7gsEy10|O@wmM3ODzxbHda$S?_Zj$_ez&iKapY)J2t>~m7wmqR)T8O2RQs{f6TV3~v 
z@^J9v9VslgXgpx>hN`l)#o?W~8FTk{MEz9b7j1qtyL6oywG=kJQ0vEwPjU;JkNASi zQGF7#Yn?l3i^}SeqoAURTG;oMQb>B)i0+Gs9E`BsX`de`A$KJJv*J0>&uMKS{N=30 zjqeprvjLhWVwkkqF<+RlyIGUnn@cc z%(&M9(}Za6XTsvvy2bo1KKn3tP+cL(X}hUk&@`0oeV1`kBLMGjo4GNf3lUfki_zM@ zQ>sPK-7kPa?64$1x~JO!-dKy^*+O1J*64Ru3G`N+-M16%BR_Sj>kWZ0%d7QoTk!fM z2f&3%F6mG=Ysb|G)^|(78*~gkgTU(h$xJWLIcrJ8L*KUjO6>XQBj~#rvVS`68W0Y- z3g&1B=Lr9_jzQ3_V7B#8>DDoNOXBWm1Yh@=;92Tkor*u=vhGqs1$Y@{wTJgEo+AG=$0C&~jR=y1p%+@<;R{cH zz&B`7h~V4SE}ex#(#!SIY77%s?Ny;Fb8yGEb*D+bhdNZuw1P7u{nR%d$vDQS7Rf2p z%imX0@g3B#zpp>T>aD{wHv5VdVe(0Ea}3)XS*_x;5J*umtpUI~_Fp#VPb2m?$QOji z{ZpT&GZ}jg11X1V76!Q60OWV9>BLbDMr|DnBH;&!w&lsUi*=H>_cp3Vq<6_e%FO#Y zo0C`uA($GgkZ7+nQ;pQ(=n$5(|C3BQW5V<&@n0>U2%)TW4)3j1q6EGnz*{+69+KcX z8kXAld2BIPQWXeD3-n3m0c%#pY4S7Z=&5U22>grJQTGeNwI1nNm@`HirvUY3F?EI| zdD*K3=4880iLAd{J@~WuU7Z?NdDd9ng`>_&?Tl}EW1VUqtX3|qfZ|%ia{Lhq`FC7G zAh@Ae4N&>pN7RQynz^R9^I;Atam+VZb(#At99s8|=mHRKC^eD76wff$olU?}9vkI? zz7O9epBamozo8C+*NFE>r0Ic3FXn;M9 z&Lb!5b#G@y4j=&r=m{aGY&2-hFTWYlB&a&8*TaY#JUIn6OcG=@Qe4ki7P-Lv65y9m zyLiJXKYr+i#w}^RTex0|rKo~}plK9|$GZ7V9?eNMMwC_F&2qUti%*q?^@+}`>ib=p zL!DE)O~SrrbY+PS{boQ42W1TrLq%R^Vh|~e^b(_1ph@o2VSdd18bbEh8Zx|J4u)EL zwt8fz^&aHBICD`5)w3}Wwom76OpX!hE{e26{)$R)rbVTO4DJ-4-N2?Cn@1VjK$BOi zWR{`H>6=Pp>9(z5ZS55$0=ziUz^e@+d0M=}u#VF)6pf)fsCgm;;;_|v*!nHsAOTr5 z%1j9(Es=>Way<bsr@LX!CVvq1&Isc$=OOy7C{Wzo+0rM^bmw)Fc{3a24K&CZ=3Ee z=njsX~|{NaGx(yV(0D@em(W~ z&H{L<)SBy;iSHc!alr6=3W5$r@bYi~MGn5gAbvJOtv}vkSY4XOnRLc&Ty$>;BjDi1 zZoOMwtHNS=;!p%uyGIl9wd=c_Bes<3F+mNvQMG2Y?UZgIFjLpncBo!+v0P4Dli$vr zoY5(yz|#9c7;IKXld#Jx$lnfy`6WPaeUAo4)z>XCrc4}fZyc9*r|buUiD{rfboGdk z*Z430t49NWJj-$1b>@@0%Q22sKlA!}(Tzw|5UpTQd1Yj=RIOR-*znSr^^b_t z5lk8-!(@dKeWRSB{Mbc>Y7cyyp_TRf##>%iR|*$1q_HtY*rfnAJ$JFI^b!a7Bxv;O zL&jbssBw+tA3ta~w0r_fridR4xfHZSJ?Wk+3i$J-%he)cC5;mygN*8sOCjqKT;Wfg z*AuKk^ReFwCf0yy*@_t(ey|ABJ)pjgTf_lQyv3GHO2XTD^fMbw}a2U?2m0v6|^m^`JP8X0cs8}D~sMTlIgy5abz^xnN<($?TTsh@+?cZX;_ zOA8qw4aC!?jqi-Q73z@VqDFM6dmyjjp*dHMz$SOXMWP;l)lJsfBEmsPF!@=fdWL%! 
zOg*w0?8~1yXu{{9Wvv$6X4a2+N3MI(=99bG(pC%}#r8Lh;>QwH^^I~T46ITb zY!g_>_q)KOmpW(R)x&sqssB>afwYMAX*vzELd-F#m6* z$C{9t?UcMw(*AU?AB3UV!^uZ zI3k(ASgliQhU?OYMYV#rlTF==1p(wQf8A2HvaXXiCa34-tw}wmGxmlJGq)OOCR#iy zX{Ul?t&~rW|5`8vwXBGObsN-#ugNP0MoQwKJ$0?0o4FC!S*B?7OLS$v-%IB0&z0lf z8h;9<7X2WCOn@#29nv=Uq4_QGQ;JhslC$q46hY_0?wIb(@(n0qbR~U=PE9L4`jTo; zjh#vEpY}7a*hE4W;nwL^mfEtZTTH1NvJHiZ}v|M`JETfFd2sP5Aqp1r5!cJMiSELmu3< z>*XEeVlq}s4!%%SV}x)+WY`*^4;s5M?#)T~3@v$&mwDg^z>5XZ6od?WZTJY4Rx zV)`tNi}{5*Tx+6Z(n3CV>Cl%A%POublCdE)+>8G{ zfX5uRzWGS3$UmrUq(2Cw?F4U{flUBru%@YVpK|6}0jZth_3zrHc%C_t78!${)RPM_ zaoFy#rC)IayGc@bo!$UlCLE>Ub{#d|4|Lj5ja&72 z0&tpEnM4yJWO2<|8g$wT8WTwrOfXDwFO5VIBi&0*V`Qv*-S50YdSvMb&Ef2H!|hD$ z+(t@$Oy0>~P{~oyXGJW>l1z4&)@|2$^lQjTwaxOc4tR>PY!59e8KicL=G}9KtHZEt zIC~LRHJ+y@X10RK%x#G`9V{yxRB^>324$iqf?%-;;d)1{X({=`SWcPQWTLw@ES`G* zay!|{l^U7IQ7I;Bq#U4*Op|_yMNVED%jBvW8K0x6nJ zt*~XVHJ!S`@rfrQ4aQ_u6e~1%_k3!?4}vHdPHR({9`VT0Rfx)em=k@8^J@$(QGko} z(D&+=MH*&@kWCLspl*akF(&$;p;a?@CGvl}0EAWK`noho%WCf|G_?vy>x61vgQY@o z`9lS}A4B&i3Gm5b=rEMO9d6k{T=jiza6e@_pywt#{bG$0z|g9SbrJc|SQ=@D2#>Qg ziB^op>~-u66)iDBC!X?_MXh)UllBuj+I_oke^1J@>4U9=Z2s=uL@c~w4fHgXL7$uu z45s#ZhmazyCieCf>%MSqfM|e^POn`d=MNG|N~40V-H^XVXtu>OH6Mb>BBexvuP7{! 
z;cEL6%FCuhTADZ>a*ONSd*DpC3|dp;_`JRw};avBIKN+G+sq6Dwlv z_%zt6DT-Mg(ilnIO>+G2BN1OuCLNQ}Yo-%^ zzlkkv>cV_;*K{Y8@RxOE6Iqq=ZnVsWQ;u0h9OlThXmP$PrNGzAnSPnZNF{=#G5y%D z=1cavW!l?X-L-Td_>t&@5T~j7hiNyNVngMNu1&S8vgF#ICl6_=@i|zqD$*VP3MSa_Q)%Be8-ja;JA~a0%20Tp zO7kvRFgV`I8hUj*F(j2KbP`SwXY^AH^M}`^x|<7Kx5TG)cBsv`bNgCo@cxB zqW2DZf-(%xeav{;`VAvewTQ=|fmk>VoF~HcUDXzVIyx(IdHkMT5s!g2jzZZU|AqB~ z2ml0iE!-j*Uv9p;s^Nomp5e zj@der4J+s#Cp@n|e5`l#aw2_z`Zi{jX6`s92YPspn6;S{A#tpGS*}2B^Y^&_m~ZGM zwfG`|$UVIr0RjgDgFxnVf}C43%3pKrXH7IA_%0-vO-|x-76xa@n=VztBnhJn?8dt= z%a*@I1etWc;UlVgF^H_j4Kmf`d!SYZ^(%K#4qenRy9`{1MDz5s4agkaVJI^<hJHh)=|8IX1iBC;(bB;a}a%5iq|dssO_7QL6+>8yPm zg?|(e+erGxYa?bChBOS~ADhBM*IY}G5Rv(^AR{M*aL@YUaYI6a-<6%Q^b+QSVFmx(Ez)R9wn3nn-MUp zOoyy743+vR*(2`DsSpZ6BJH)#-fBQ(jm4Ko&!SwV=rtV8Uc_^2Kp;nGx0Q&~*4}CK zW1|+r)0C7>G31F&I%1T@m^R=)jjNNycuX7u#jwQ@R6UP#Y>d^lJD(022+28hb3^h# zcRBN_>CJ z#=)p{4X+V{-Me1+><#jUmHmUd{<*2@#>w(Wk98{h-`!>ZABucGr#`EVJ6T*SB+BEP zfV>XOg9i;I#0<6yTO#sokQAjFJc)Yn9OT`m;;+eSa#Sig5rPUzj&Y&g>s1MfI_cCL zjW53-yREQ{{1jnPvfJASog9hosKK@70Z*I8>}x}Vb&Bh5p4CO~;|um4YkPeO4tU@l z5me~v*6YUt?yH~F@vXWnJq8knEqNim=WNeP2ymL*Xhm4okb=TzbLLwW47HZtMlGB# zDPfR)ib2&k5tXQsk#;gf&7wpWU~#`sk0^MPR{l~_>7tbzYTe#Owt`d|B@_~vznaV# zu}?T=EnPkskbQ|Gjh(x%oSLZQ6$?ZF3%8M)4LVDW&@%y4ZqSrRleOKk^omSlYo0E)C0An>BRp@0J{Dj*nzN?RimdQ6w5m zN!^dw)%I9cjU3w_G@|Q>#BQwciEEV$s(J#x`!yp;#G^jZr&SoJA>sw_L}f=kaO0Lh#hf3MxeRoDE@o zjpF^6nas=re~!eU%jgat7!C-aN0n&DtyN;l&LBclThUVo1Wpt32J@xu6wjWI4jG|Y zuZ;0Yc*rze?UkvJ#e`ZAbD%9I6tl1(4=-@NP`PlsN`pGS4~el?U}P;7hi+^{95NEN z-?=ry)|4<>G@q_Aj)6tYZih{p@z_qrpTP?YIl2f!($Riw$}^8(naNN<^?`f5r&=zH zkF1PR&c^A_y$dx78;*yCsAGA1Zhs8)Q9ky+%pl*~AT|*j}Y1EqIkp|W8OT@iu>z*7-GcJvqhwbRK@1@;LD!a0UktE?=i+AuY6 z!*IwvOIN$X;PmPy7gQr4yH-B0ml1sojcGSJ?oHWBVmt9n*jd;~~ zvjM`%AA266B1sIn8)PWtCHj zmreT6vh7Rvz`glH<)uJ&Tlf>-!vEEQo;YQYdvJd2fVJ~FV`ho+u6++9=)c#@;{iUk6I$e}@_ZO2YDbB|lQc8k5rH#hW?$Fq*W^|9n*SW-B(>f7f zz{G31Xjg>sf6u>BsaeehR$G{=E(Q4wrV-uX5<(@8d21l~e^QnAwLi-=7?V(B-O1*Z z8yUwL!MG_t&x=AV$DldvQD+IyWk})wQdrB`LKT@H%RJ9- 
zzg;h%_oFhjkArbcFm9#5|Fi3lM0*8k*6}T>jQtaGFmKRG<9_sx*`l=nYOR1jY-#p^ zQlga2%hH&ICkp3fP|@n2h@wfj&?91LW<$fY!j*b1x1h$7Swn3ti~%#w!1`83xOuU+ z>-y@U&wMaESX|RNCG9l4PB&vu(it$MPtKf;RMr$Pz7)!kE5YK+lKXk7bMDq`@vch9 z7wV{6edcK|T3-Hy3Y=?um>?IZr~;1SEN~|Zerco?Iu)ATl`IR?gVp zSDjeN6eKyjWAOpu6yV?=QB_1DBSc)VQDO;1L{J>!BEx+{!$VC(SkRKh-IF6pxEM4A z!M`}JZfGArCcPZ9Iu)6pueSYJw;LzEIi4xrDK0B3D=m8>5RodvLmi8(_443WmT;}u zgTTf0i>rzKT{{X+VZv1wz{3fJt?zu8Lxwm$PGT!Rb81|9BKrrXWr(##QdKzd;&I$&bb;i{- z^{I~WK~%hnK32byi6ZRDL}WlCagK44DtsQ2L}&svVAGKoj;BeGKIC`v1lv3{^M`B) zijSK{S>mujdEA-QZ~P>{BjV7z_Dh39(aTB;`{$pmwq;VQG|4qRK@taO()0I_FOHSKYJ*>ttltWgYsD(-LvKf|PCiE)IMaFoWxkXvH z?KVVR?F*UIN3@7UupUE}p}xg55Zj?84{@l?I49VT9z3y{*?sE@uuK7j=*K0DY~k?9 zQf&x((l({IxPFtS4(sJ$8|Lz*Mng=AL04C|TtbS{2qr5b8bWR4dp?c3~Or1)`5DHTKQB{Y*YR~cPLwn1lc2I$Z%~4cnUvl z$keG466&3JFR;SC5Cg zZcrxON=rf)C(y^pS0ARPKy;)D&TVRN0#{-32ynNS<}rrV>#4ysHM*EY71K5FUMJcc zT9{bB(s1z-1tfX{=JwV<1y~b#M1&vu@<-rqX@rsKytM`TLTIXh;N0y`OZvf3*~!KN z0yxL558_-`vu+1LTn{F_Pp}|3u3#tWdf41S!OqHa)UlM!M-QA2_usq{#I#H%qeGA3D zQyR)i;vb%iKu>CT4px@J*_c--}+@WT&8=xr)e@>}Mc&L=$Sh5bxk zB%~7qOp=jN)T7u&K9>;{2VCt{1p~n)9TZ7#PWDzA)TDte8voF?so8#=?T0yo!Wom1 zCTPqn_8cQY+$5Mk9*X|*!j@-j7X!>IqO~@`JS(weJvj%b?9MhFtcLRg_AfcCPG0ju zBJhT-Iw%Z?;WQ=(5He8|ZtKQn=ggv!TkzQV(^gKe+E3CY`5cR;sK}a(H`uGt& zgr$TQ-Co6&R%6m{BMi|im)bEGY7YdQ!7H7`>M8s7+RLA)gtaW7`XImFlt`nk09X6( zfrB)#{qaK8{yFl8g)evq_2%0L{(njwKvgv`(Kc|LMFpcKI@hw^eX@$}CWWZgB#wu5 zNZjg}`uDh#ZK8R2;@=~ZpbA3b1sn)Oh?l~t2;X&Ff+*vzzid9_zC_I;Kag# zXJEYECUQ+LnxU0-C#38p7#_CrpiFmIfQzO`ql?Oyc`c5QQr=07$L%)e845>$ ztYOpGF|s6Kx1Ppe2~A(eL>xq!blMRX?&apuMK_LFDno#|2n=qc-DXbpu5PBuk5IID z8?^W><0NK#@?leV#9ZpOE3%sYs|qNR9XG-ST}* z9Wk4e2)5j;RfVw>AtsOs77?EvNV~T?@I_z}k*4}GHIE)g?UfELlv+XI9=DFe&#NVB zB>Z_sgv7+wun|pzj`hzmYhe-%0_0D_{jxN{F>?l!9uNdW*C;z%GG<;Y3fl^#b)iS} zw}Nq-^|DSlHZwIF6e$V+6X0e3XHTMKIdGn3m;(X+e zNEAuHB1*9S3JqKq7ZPy>Uvo=a{x$)koUEy-E~cGNmbQr7aVdkjPw>oA)TCVVne&-Q zhXXH;6gLr;@L>GQa^j*FUF=>N!*A8(7J86gO5!M-CXQXLJw&8>%^II97DsyI4j03h 
z$Zw~+a+VoUi5lNphe>#;blY4x!MrN#EFxj11;oRxxa&3Lpz>LE&Lm|*q5V=R zRWPW|Z$u36;2xgPZ;7E@*S`$Hl^Y4_@pu3CB`ip`mvTq7` z#jM^jkqUQ;_8|}s3wmASp`xR!63aEV*&=(eUe=SWrXwS*iVo>GAYfR`IUDFwgLHk4 zheql5P7olY9k*nr6b^pE2W(H#hDx|eN-3%A;hF{SFI0|Js(Wc70^SFofb;zr5CGo$=+A4jK^;}9{ogQ3W8{82X zn0E}N`?q;YTO%>5!C0{hFsbxpBQiru%IHQ9k|=mN%hB3qwf^EH(o;Yf3|DU%{|f6U zirh_zQE}v93k?KszVTq~E-_iVL)RSgCZYMlvbV9%-t;9WC| zx2ZbQhJp?Xy&^(jMIwPwhT_La3*&_Rktp4(quiIWxQM62#xY47R@cu26x_<984FR6 z`y^`F3pkXSY<&PDLnzRDZo49VWT2*{Wg>Cs`;Ke zy6*f&W}lhuB7}{ffyskf%!=ixmhiCQz|xQ1kGPs@PwN$d)P-1P`0KNg-(&`JPD-Db z6niS~3HG|%;Z}W1TJQUIw1(^yEL~ZjnyYbjce@Ks2$^N=Pm|X~F9=34i4#gr?0Au) zgOZF)M>1fPrGgotQHba=OS(jQT4FNER@fz?xcV38np|QaFj#^AuM(tG~8r<}E22ks>OPnrLAe_aQQQR1IT&*1%FbD+ zMOYph^k31=)AzybrcyiJQE1Ld#34dZ_$^5$c*F1!3G!?*3(bG6BF#?vS!m2Tmdj;S zA?z<4G`Py~1;-os*%uKHS;}IKT(}4*wZfNkT1_=w+ny`e*92?J$cX=xTU|{z&~Qwl zod$_B=V1;Io2!)*Ho8WzPT-Krc~v~1NpIJ3WpNTWca6D6{h>U#V3ZxZN+y_34*Y(pLX);s+P(f~OOCm6vsKRGiI zmo9YJ$k^H}7q@u8(pX1r`_p$li!`&0FdDopaPzh*pwcfOlfOy#24w#J9>Nl8j!*9z zpsG?UVi3&JZ)8K-g;28^^RZ70;o5oI<|Jy)c-*%q89y-ssNT>gDbepBOs-X_>u7?QNzL;{&kNS7y0C zYsc-oT{iHIw(E`yZr2qs#ber`>@=gEQ5U}?cT_wm!-4B@oy)`O2c)svQ!TgB1g0qb zc}%y2;Ec)5ARokF5jA-0Ly&V4OL&>t|0+qU0E6)%NbtYDoU(t=<|nDEZCQ0#Yo7kr z$L4rt>5MMoh2?2#=@skCl$^j5?U<-OM3keeN+1FmO)z@(g&@;1-KxDPRb}jp2v8D| zXsw18(Hx6r(fl50RKL8f0M4)EHvEoauJRp1f)_uyu}>el>yfh_IkUZvL|b7-GU?9y zEiaUDaf|sR^1E#n7G|519g)A5)57@)<)BbQ{;w|ve^3^!ltZ&Pshlz|UR|rywdpmj zR#qOGG#oKi|3`uTvv^)HU9S8^ZGX-x@!=jNa+g88uoS%Ybr(){nE{>P~VgbSt# zAsr8?w*9+LF@S&^!KekdNGklNbASI7B-#(Aa9kzi>Sv7q?l6cb)XGCpIjYJ3Tmv~M zoFy=cG`l&`It^|A?lA0vIWna9zG|BPs{;Q|t#0O!+0O##bNiPSdG4bEM8|)BIBG~a zO~?_t-NgRRuT1~$r1?ZQ4>M<7$Cw9CDZKU=fIk!V6?_QA1hA6UQ6%aHKG-{LQ!l3X&J(Q@3a5e z#J~^j?NNb>>n3db>hWKl`oI5y0!(=n(1P>9QV-I)^T06odkez5ZY ztwQW?m$_(rZOJYH9r+cFyvNA9VJ=ArfcV;1*3Mmzm9srL=y=ISzR#qu-va*Mt6o4Y z`%})PG=%Te@I4psCufDpYiV@-GMGc>4u~6{yHvG7Vtbs?d00(Q;~U^er86N2~w z*i6v5btdzi47+8>6!8{F0m*E{chn#ROVx`%7r(l7d+2;ypBi!0|2;cV?qu|-WorIAWH2k7l 
zYZ?Q!4M;X(YB&KgE0gyYJbnag#uma%UowL672pY;Evw7AE5{?hVq5A-?QZA31{l{9 zU}Nk}C_-Q+#Hh${swjd*Iq5s|3$I|8eYBO;IiN=H66d3o-Y{dWVL)QW(MOZ_kWR=q zUBD5ZCAZFAc=>+L@7SAef%xgw2kd4+>}*zS=(f#|OeN&aA}MBjJ5bFubSwG*2ZZ3M}o@(=nvHK{6773dNp2c*ADHB z)?dW+9(u6(Pc$dBtp29Dxi$9^wkGQUZcLwDa$=9Hn~YC8xJc0gukN}NINIn#1T(FC!emlqo=s{r9;=hc)ya>kH zs>xN_KJgt%&bC*MojZUophV_WXZf)xn=Hlxp=ny^3@dQCIkAc=h95Y5Za8#xIe?AU z#a#oSXj&L_qCREV_t5D-N-1B9GI>rfO}IAR+h8X@kRp48YPCMN_aQL3wTz0oZ;jm@ z13`$l$=B0K#<5=@N0cT+stL}J+3JP?&*iwQR@ZcVHyiCT{Q%?KIxoRqJFk_M^4tx; zc)U!MYbfF#Casefb=tGTCWBbx{oE_?EUexG!1*^k0D!+^wMSyB%fN1Y?wHaRdRh>4 zb?^3Z-k22R-*t#IC!zVo8x{1{GWm^h!S~zBM1mYQ?9&wQb!B0SrpcaJ|F5%AeoGvv zuT!4~zB&LG+YLWpaORqDi0-h|w%$5lhhbScS$>ZQbsWjfIfkMdfOFk18uua@73rL4 zEnHW1iZHSxRU$6^0pP_eOX!v!%*t3TKI1{w?8h;gWN>r^Zw;6HG4Icamvt~aq3ZAz zv@`TJWA3_;BLKMFeI0GbBQE+LjhI1l1jhLS!|fM91So6kh#sy$5kPTH=o*V8e^15Q zWaElmv|gQCKq!Lg>RI>*2$N;StOxyPG?%|D4`jG+hXEIzZsCQLEiv>B<4I`c+Nl{w zo$NyLQqbT{rL>B!=vls0M7*|OauCp_qrc`62#Mk8DbGln!=qkG7VhV=UzW1d7Iayc z2fs+!*1>e{QZgArhYclJ)kAq-CuE;W4Gq4ZCMvs=&ko0TV{WGoQ@F_1_2z z42NG2V_vDxNU+-$`zyWo=)}3?zXON54E$7YIuSVNU1)=%@LZ_tcTNRq(sU6PO{nR+VuCBl z!DzJH7+W3wl>bJXf^x_I7Z|k0jEMr6;_MzigpwBRvSWNn_NG$$fxo&5^*&+-;eI^S z$_BDh+4uFuK_2S6&+<_~LM-!OhTfY;Bjx(NUvkgh0OL0s;W>~%tfHrPGAuxQy+nQk z=`C@zO}JE&m){FK1PD}@4Rc>+00VV$Q+wf1=i5^!GVf#l^jMn9?n^}H1N`4C_eyF6 zGWar-BR1mnW?v_7pVtGQ*N%;+jsii-1O?F#OZGVMo+<1iq^IJmVL^x}X{kR&<=?|* z2WBo5qi_dRZU|1=g&z7;`wU9>Z zkb{&h>qx_U+edUhk9Mk{s*?jEWgi0faa5;p(xkRD&8bF;-krn@O-O#=b?54zUw>?j z_j@IDu`Dlpi67&LC0E3E9g{&H%xq8VUv}kRDOSY$-ev?|X6)YX?WDU1PsM$1#m93b zcy9!vwH~d1 z_(^4%k!0^Tv9|@E7i`boOXIM~cx)&r%jWSirujD0C&+*^JNR;YQc!f)eTPt})Y}#D z+Z8apj=hZ;LVBsaJ!Zch1=g9&*QH?@>&5-1K+TertW>Sq=uytRX&A#;`K8qV-UMa- z5;n(;Rm+#|={2AirJ~=0#VB8c$v12)QRVuq6}*AqViuK!@v_m1`{@`n?_7VhX>vIt z_h(kDfZo!s5^gb9C^81c{A8T+I1_{c8}WhOX=Ug;Y_nmrv8k^nvl7lF<2Nf6@K%=O zgK?2~q?Ucfi2@K>Dk-|@A-0;GP=hsR%WSKzxs)xIBi3hT5X}5LMXGMhT!V;zFzy+z zNZmzQ05624i+xa1+LJy-_AB(8J-E7DRSVLb!dMR{t5Yh7$Q_fQhsTR 
zD5yYB=j-JGbw=W-h&g~R&R{iXx_*_LlUG1kz+ggQ&BLrE!2!<`sge{9`3W4ab~$?l+>HNsweWs z$p0cN*p(snYcM-}&;YG`RL`9bA{23H%)bC4U9j$=5{FvxG5>lBkl~xiwHX@$wG0)o zQZneI z2;w7armgliq}CiY=BAu5F*Hs~3pE}?0%?Gr_B#ca@m0pQwp`jB<`%P8d&NhnFVcawwH5M`IJbSY1h$_8D zajb&*DuJlzR?TZz(}2Xy`yL^Ru|s!Oo%dF8i}kytl4N7;^Q1^B@7EYXq_2!+88JC1 z2#5YXGHWgs(qzh(W_WcY@OIOfwVg!!><-tQVVB!aDjh0l!Xbz3ITaw$>aa|v=TJ$7 zQ554WQfbYa-^=v6&2$Vye*X3-@D^ghn2%4ag6gZmw39a;=&xbfL?P3u7~vCN136rQ zg>^#DVGy9s|8gX+9;%;g5JZLEZ1-Vdv}Ov#C;LTzJE`{v%nJ2Bbr$3KHJ&EL^!jI# zcs4ty&$|y6ktX-NKF@4^ToI2^0@Ye;alx zy~(7@vO^y3fV*+8BK@LGO=}B^^DVWiGy*Z&wYiQboTRbb}DMxm^}}vQJpB5tq6)U-E$L=>^|zRDDJ%B=|n%DKchAw zec}b{vdEn|WxKa#kb0^ieyVXMUl_log3R=O%~unqq9<5HzD1anASlATYswf_u?_@LeWu=Qcy3_(#mLn8`JbF82ksk z-WlJPYBKKu_FDD#;eL^5j}@z&v~IJDw>$!tAYXX$MBgT2}CM^jK$ z<|Ph{YEvM%y>#i-c1Mqx8SXeG3GOMd@b$u}$RJW4HP+1_WowuQ>()1;=$P)X_Wt}; z8anLnI04*+8a|_iZ=<@18W>Bl!`GUSp^$$#S-*IDy;wP!c=*5g`pcj=y0C53#u?n* z-Q6V++(~eE5AF~M!6gjtn&2V0yTjn_8Y~1SK!OC@J$at@tNQBQ_x?LoFf-k&dseS? z9@kaLHaJvleaWuyJT##K<}ui?a0s=ov#o9yw?xxT1e*~O4_39h!}*2JxOJpje_s!F z2xXmFPbE_kR2(qa?k9NH!Jq4mk{CDOKZYTpABWbt*3y5{+%s;Pur3W_F_X6mUOHy< zvN+}E$3c;frNNI7a@Rep_h0)kZV0qRqWimTUhluD;61+f(i7?`=-U9J`J85UpQh^` z=b;#cz-t;$)IJkYNP(|i-QdhIC}x*K$Eb$}`;9uJNzH-vDuOO#m52N1UK&l?Y@Evi zBVK9BIwzV~m+_{N0B7#8NqwNUwCVyF$GmJox@f8q=aSCZHdZrW1(X?1ONXLwZPe

J__F+3JDl4DGz;x6ebW0SFe_dvutcp^J zjS42ZkxZ_eAy~?TV~7_D>3A2 zdT5@EgQHuI!_?TpU(O8qG8otrKiHqGhWM;g?LknA4FQd`MC`ALW(v(Wj9cI1fHY?n zoZ^BMKjPi6J|iMs7v{Pb9Fjcjq3v(?YyIsvb*@FduP9Y*U5C4mLyV(f5pONG%A0X zX#D6{iY>Nif$aV@v)QWr*H@dy53lUz&|>1Iuzj#o# zRpW_^(UdB&8MhTd^fGZytKW3DkVG`bsPNo@IcNH>L*A~?6f_0SFQRMIqE4UH}s&sxCwQO5LUkUboEery4NBeHW)JKRzeKsMA~~O6rpX z+b?eV4a{+NOBm7oyt!}9fpb6Szb0jDIaXuz7dmqzKmyfZ#M2xOa==?47kI!JH&Ezo z@5!JRIQsw4y{~jP=pD0&T3zeqko&mqzAQ$^KsN*9a*{IzBcHCG^ffxQsP7A!_5fyK zIXXjkcH7%d+KRODkliA&=$w+tV z(92e{JdG>DatrMpDj-YyHg?0y)|CoQO;~~K^51Oy$dfqderW6!gy74dXnm1gt54*0 zCO=!Venkt}@qqq>s-`m}Ma`QTc|_Jp@eNstuKZ*hhqc2FgR@H-yL1imE{fb6GHK># zkPOTDETc{~Z*Izwu-3sUi_|i!4Np{|irEgEX3otuDbo8zZj^$0oN}1O03~N~ zrmya8H^U-V_BT%Qw7ThtFFa*;_O%&9>v1dwR;j_ydz$h&o_7Sc1Vha)ZNi_>m2apx zcrHAKr`FxD5Au&ds;M)7$C9{XJlT^6QRd&miFt}B&_4CQNNqH}Ylz{SrXdKpYvsum zI$7Sh3Wk=9y<6ug%31uJ7=7$L z9=J*jl__aXpfEk_#eJg?JDFx^#t=fY@K-MT43+A0mNqQI40hVJdbF9 z;8%5gPD7L{BV7mHUy9t&tBP#)gY8-mg%(J85p$DnHIt+7@`FL*lzc25hsdL}D7uXi zuk0HwRAfc1*!QYkuD9(dn#MCFUXO!@xQJ^xg{dBWZ>a@x^n75Bftgppz@bZGCR6Tn zFR#;}#`sxT*3O*R`h?({gDJ2hqG65SU)(-^K|YqRo!U#z4c}}M=&)wN8~ii#!VJ}A z#x$LaQWBU+!K~6ppE!*$%kxGNigKG*j4wv!*5b;8-yGd1w?jG|>~t;=-enoSQn=H4?p6(+L%v zRP}?WZwj|1wjrgKCdmHTW?mX@a#Z&5Z&_c&+8GwY?Q9dy;o4B;nch2P);y}=to-*S zB;W)%`3+Fy(!Pf8NUN@k4}$g4#ve>%OE(DO?M!eMRgQyw=7IP7+`7JL*m; zJqLDSdU$!6LHMuC>7%$qQ`&XYX5Nh$MmN;#+6^WiJ5@ULPW9ai5^xSTzP1jjNDr2V zn$)FLgprXGty1RQ)flL`wnxFc*Cd@PMEyCHla}f<4~cYj?2oxHI6l{c29bMr;Ng9h zdsg^xxyQjMzQIH}@^>S8I3TM}wW-gu&dd&VrQn|>cgheSw2~T4Q6@ zd?9JA1VWgU&Y z)nO)GTKr*{eGvWXm$k#lB%3p{cReIWN~Js>XX;G2{=QQlwa@+#{)@?A#QGO1mWA%U zIEd7Bj|{9wEo*CeQRX|8w3Z|S9#UCUgF-_pi<5F792k1+F9dQ;eauBi^_;7pcpHu5GW zEK-$|qkb<-&pS(NYjT+W$1#Y!qgsv1q!^!0kxx}jk)H2Tqdo-k+E)VmWUAFciw-H+ zT@th<_wpvZ%jVoVgf_2?+7VZT9gk#`pxpYee6BTp3Cz6=pq7cn>mu2!LdopT*~QA+%<7=Ha6;ee>4cDl!ktjOmvXR zE;d&&ikas1M#K0ThXM`!cJ#esCc6bA-6tPujpg!cB4 z%7PIx?-^m&RvOA>^q+wBAiI!}5i9Yy1Oxoxip21}v`uQgg{s_ztS$`pL1!VxscroP z&yi-LoQUBQQCxdshHwh{UZ)I#hj{fd`Vq2uZk7+LdWGz*)>ka$b|riv5&Z03qXfRh 
z>(`h(Ru(illYJ3#I{WI_wb!JY2wV=7FwKHtW$qb8{P#%L3pw5f-eJ&*z0{^15QV%% zoi);|r#d2$*T#3Du5(_5OFA)LNm zwLe~oqgN@g^Qpw_Z%J{ZYNv}i2YuT`y=IjWVpP%yFO~^u@V_CY*UXAi-!ZK!-(1Ci zx&PW#96O8;)5>7gcDSdcPrfu}wdZLKm-w{RW;H5Cu|k;ZT(GYXqXC9jxs*hrr>a5$ zAD2uRj?dHLxPTizf@G40&|T-yF&#Mh>#qCnXNZCIn!bIJWse1k;33}NteKOZD>)Hi=o}x z*J;G`?bJ`>YKp}_Fp}{U4W7?xwqzJNl;8_hvz#yOoqI>-{aquju4X-j!&ac?9Pzv@ zg)|6>cUnf(LNU8%VLTNEkyv(wrWo->cWXwaNNfwKhc{M)@tSnthtuA={iw=nM?6gMXW;E@HW*0dt#%qJMVC;;{C~qG`D0Qz4_n+< zzoHy=Sh+-|=Rhl&-SRojEChNaWs+b0cmt(vnms7gzNQt$Pg&n zZ3ztyaf^`(6q#CmE%_ex#jCKJ3uR7ek(ZeR{W$D*6UVo>--kA5-?Lf5-`Y33EoDjt z28}xKH8;=_X^ZiTe5RkD?A^JNSKBM$xu?#xe7y$fp(l1r->rgVj{!G*dcvBEsdYWul8Xc5VmojWvNoA~3ZKc!WS znr~I%>1>;&qDSRvUEQIZ$lvV#9JVx&e=femvU(NLClqw~TUky3^*=X#J`-%(RunvZ=*8>vAz|2m!~TtXR9(|VGSQMJ-h7u~ z{rE{N{h>HHy24sdEe1iG973(JOI7Dx-V5j*J*9BQb{V0V(R__3d&P&hKvr=HcOQJt zH~)N=DXr*tVCR%1;wE*_F(SulNqZDo@v2ntA$1X#u@*PVAT~gCaWQ;@#*;YLnZhU`yWdr^(@9Ofmh|$Hs5wwj?Zqc+<#B6ifGW(P$R^`YX~9 z$x9@m64ZFl>Xbukm$%VMZ!YdhiMl7R)?IbfwVLBj*SOU>d2KrsIV(6j1VNiq4^5Q} zIG@iPQXwi&$|=d3m{Z^^HrOS3uNyqk zmVeA%U>UH-hnE3XXOXQr@ zpR2JKt(Liy9)}1axb<;|KYce1hkQM?!n|ipj9Q0kSLbYQz*T&7#pyUWF<^7Ggj26I=mBE0HJkxIhur>T2B zYf^=(Hz3OSot@Uobj~0K7efLA@;C9ob2Z92iC0L)dlEC%iX@ShrZ$eMeqgDqOXHV6 z5{}|-3LGZoZpfG&YTsXBH7+YPXEH?)L|e7uolWQh@(`F_#owyryrJ@GohL$g7{>>j zxA6p~Uh;3E8oq-_jGnq-`y_t~O6JcHTiHmOx8MK<-0tV^4_=`UBs$L5**phsGb!4Ych z!(cb$r@9^KpPx~g0qSK(((n?*ejoM!s%?K?;fqX#ZtrHT9RI!jDO;&WMbSqf8=r2b zNvodCf-Rx^3u)y!d5R1Vrd#=tU9e`%njbZi-G*XW<_dkou==_ak+xH|9xGOaezHac zu~0Em$6oOHMf^e&*1Yfpu@AOzE2huRWB=d&+rGaFN-koK8t6r5ycvxV zCNBO*{Rp&soaiijO9eKg&BT&s8Acw!j;Yn)&qTtT44graNbTsfaWKw!r{aW}X=!w# zouBbcrpY*3az8dXGUcUG$Hp1PZl<-9Z-pgEf0+Lt&N~#zFH3G#sDpZ!zj+wd1$p7a z{F`p#SpEJ_EdaL;S)J5enOaX}Q+Y{)h}RP{f{X)nRw3a|m{w&4M>boa@G96g&hlbuZ~P9qa>hLj<^JiHsxcL`|=aJgms-k)muA{1?P-#4A^JLm!4 z#OU+?j5`xEpF3f*Wsnf%#d3dXJa~w5rq*;&R>jb8Wew=p%N_)JZLBLt zBpGUyVY6eU`m67Ti@kuhB^rj*4=5)3Mq_t0_UIl(nB|m>`O;hQ|F{$WsRG}#tYzeD z!r6Q4uy=a^1@Lr!H}!d{cZV@;WdyY=Lzv75W4R)+3=fC!%N?~1#0|N&*Khyz;yafE 
zra^od16bqLMz zjEv7&=4>JCK0*4(C%Yi*424=WHzjA!A7!(n*uVW1-=7q@#NRcKqQiDAFyfl^2?kYh zV?1VB#V1A7iQF*pJ;X5OD<1tBIc`*q8Xa^*Y>u@y67Wf3TXK7jM+kr|N%T%^!j2y6hFv5jk!F>#KbzJgnaiT@B;nXZcINnF zDZg&`&?V=1MArSQYX%edLyYU7Gz$Fm){a~^C2m6A54L%BbaUf77|yk4r(DPlS3t_0 zF^DL=b~q_ItENNwEa7s?%pUntuu?FcqXiy@iHwE(eArTT5$p<2s4Q>6Ly@u%^TS8} z62d&?%GWhC_l1FDr)`yoF;5<8$WYi=<22*ci<^swU%yUiw%upWWKV0=XI4gGQ6Q~8 zx{X)FCmeF^lJxacvXNmn3YI4DxOup(Pkp!YtJl9jHF455Gv34=kK+^{c;I{z3GDd6 z*MFIXo)RsxZbke6P?Q5KUaze$_#FIn#J?lbUO!sB#)3p-_!P462$v@Cx9Qsnb@WoZ zcsRT7GCMO_ZF*a{QYP?|V(K~>&;&J(qXv;x;I7EL+surNIkXs}b1({CbGLrLv?`>_ zT`#l{^%P=zg^OZ2p8=m?;Nacx31h0SxsPBA1L3qD+iSEd(=$CGN{p2_LHt9a0(oUA zMGMo@P%Sf5HU*9#r0L#bmM={#2-cPG(#AhrRoS zwre28XB*|sMAW#ea5_qAe#pnS-OGp*%rQm zjv$+A!4O29vxyfcA<}6?#;rAS!N-nSTSvR?FPfZ6P>53cL-03Gvi+?f=;C%9u0dkUW7G;VO>+J& zfRw6%VzDoWLXN3d|KKpn2W2Q#(NXxCvc$i0HzTX7d>JR!h)LP=R)vxqv1g@MTF}Gn z&%UmsZr#6ja);1#Vp|&!S9P~G3yL+2$!T$UE#2$Cygo=`d;RF``3*oQbsl^bJAS4V zt)F!`&c$hsJgvC_f)eRGVhRux{Ld-;JRfjG@U70#3rZ5`xbtkNF%Tu5hhx{mQrqf) zq=VTi?{dLDPj%WK3X-MQXML+1NJNdYlY8x>kF1n$&*e;)wgHtg6u%ISiSpFFdSq5C z7HPGkrT9;!C~*{HS5kM&p>{Qz?d6IaK1tukB7ye>2>a^PU>!em&d-ks9o;!7CCi5p z%8IAp4D&>})louC3ahdpAzhD@vhD(e&&!QHHGx;hI%c1n7$KoTKjT^&K~=c550YB> z$RZi@BvQjmEnZ^X-D~0LCBUn(1u5lEN20Z7$IwM6#ik7)=9!hu;$TEe5*RYaP>hh0 z8xIO=9qk^9!HZk^#4jLEGhk@xv0RBYv;Lgl3gk8QPk60UPtO*>_NDTRffBOKdy z&f}0e#9&1{+o?d?D`tM{XuP~ux3uehk6_mS7?oz%j0mex2O7Lc7CQrsx*&tU4xPtq z30n$qknuHgLp#!$M6l8=3Zi2 z$`A)R`x@t3#1BMGk!+Cvi8Kz?+C%Z-8}zi^CY;S{e>cARdB@9VuVGKf*_skF6qhTr zmfk}XBpIPP9ao{l${Q5xm1e_<23}_PQ7=sh$zeYlCsgO3w{~wp!S-7)I-oblxOCJi z4Ac_n%v--hzcx@gPlc@d#V-CVR|058IkA@#>IrN;<^@OUg|MjOVNpcyfOvT9T{0E19xFdw(ph+ADdP*IK-UyEqRzAH%k}7-<8d4P zvl6^&8!AD~f`l6*B>+^RruxOA#x_xJ`IF}Oc;9#vp%qac;-pCR5_&RRxjsA^Bg3p!9!#x8n<=2xR@9g#jY4NWSrc*>bG-Qrh9 z6re8#-|*(=YT>K;NblOFWjJK^T4?8q#NO%ex|v*;yNrmL$uF#A8I0t_W0 ztGJIh4WG~87o9tABoC#q`bGKEJWX;x&&8%rF&yOfejIiYiY3 zy*(YS;xkYjBrppDWHJ=p;>nF>ObByVug7-@Y`TjpeprX^ZPpVmL?uR@BN1t^Q zg@Nltan6liUvadQGR#~#A&EC9gTb89=d>-`$NMsrev3QX>k^G>*gfS#SWF7Vgnv6! 
zz&z^v#$3DCaz1Z2eu#QaYi%aC6L!Yud>yufL6FqKytoqtr*fY7T8 z^n>y~5;ej4^sA4B2hf-_b`_|h%n6$nA(;FaE=X*KY+?efiYV13X1PuFj%#?btBA($ z`du0gvc4g0Z|&83=nD}e2KmYo=($FPYe1dcg^*7D8^jp1Ui7f9;_1F4GE;~*GqXlw$v^v(WDqM1&z zIfv^#CR!utrKaVve3E*ml*=xb@Rr3y(}=S^SqensRRP<9-E$GHi}nrURpMx~UkH3A< z2RGJ291%*C8(wgjmA783v^jDWzS#RwetzEpyg~}R(rAE|kejrrRf8S21Nw?~WBm3M zo)cQ++qj8i4pIA@*FDxR0MOucEq{zjPbEHnmXCF0X)z5V(El&m5D?RAnj`&tl81z< z=4O37$i+BgNp!ry{buYx|KtHRbO1!K%arc^mz&wfrUh_^jPTdR zlK(<4`jNmVO{dYi{I5501K|iC@29-k`eMYC=QW4a#;WpPxB?%pj2#Z}CS$v1P5=3| z@@d_EtG^Qi!u>rdr)e!vy8{acZQ${zu(eN$WQ?>Xj8}7U${!f@KQGH$9;g8B^du?2 z*blrC`SVhw)fE^e95snyDkY0o|M{)RfFo0hVPNzBLY@8>9mPV=(D{na_2O)*V`<~0|UWPMwjHj z#?tmS^fZTw`TTz#kfP-&4H6%`5Wuv=LfcxH{hiDBwtoz`5U3~t+6XLk80_F^+~j~wh|t-)zg6oe5~!q zIyLV{RZKm)dym$Ez8@#=?hh3t|3aH~fbGPKaN~h)W<~L$@3MKV1P?Pp{D3=&BRe}R zG<^mhBAIfFWaoxY2mjM94=cm`+w68Cu@J>v*Pp<;62@=`kgo=ufYQgFp8?&-7RJD_ zf~fpNOK92iUuO80wHBLf*Xv*wr*#FipCQn?+oO6B=9RgY{4FEt1*F7lINmXKBBwsy z1FcG+O=p0^sWjPZ?N$sGBqk}v_cbqnYdGijW`X|UKm@>?R1(i>w-Y#A;=nh*wvxQD znvY31s2hfsEIEiB1t^he`UsDjpjNk@+`#qS{yN>K$UkeUvL?2>LOu7BP<-Ra^O&YD zR~K`}Z*It899haT1fH*b&*4LldL1Ea@u4nVZJgXpt<;~b6gyLDTahOsGiCbyQUl2I z!9YM@IssrTZvuDmc+8-W>NC!8fe}+@owWY_`368R86d0zq^G~(!nW0Brtchwvxx7iBMD-%qC&KhU&|{lr@|sSkPhv|R8qMzn z(91dm)NsLlmk%WRFLpbI`#K3Hp(U*$)ssP`e0fLEztLk&1Ry>2w`LxbR^~Q%>(xDB zh8#vfw@B+vK@UaE#F=S8$E!@tWKbnCt>B*1K>YcK*)v1f zP`}>1qTDN>#YOX-Wdd+gJpmTe(Bz<>a^V&w=5-yxvY(U?xPJ>oz?xX>`sD`rwi|2W znMs!}Bls?(KHf!fVEB2&PVjf)vV}i=L4Nhs$hX~t{WbK#mx%MX$(R)BGt@;i)w{3Q z^o*OFU$OOEu~osky9pePl@CjsBZL&hS{S2~j7YRLwJYk33+0yEgA)3j}QbG6Pw6>*?3&y+O@H!KiK zeaWYgRZY#RDTNIpv_QUmC%ba2Ok8vF{rT)WCD74jyn9Ziq+*xD)n8>ziqj)079cQy zje}9a9;#8HN^txWYZ81!^Fu>tg+Xfyhyyx*GZ7;ME_1SZ%sD}%wVtQ-9*(Dei0r<( zDSe+P)q4{aV#^>Pq_!#epi1!Xdc6nSi`}nLy*;4#Q=$*{6!miZQd1%JbvCjip)kfN z27~00(i>$l#qp%Oz|Ef$x1$EZB5q>Yg1nYawOj!LaUX#T5Q2$&x%t3@C>{t2; z(6l%S{9zvuO&8I= zm*h`7nD-M?9MBI>FQcF*5@P_;%#Sh8wsLzFO19;%p|BxomjEtlQ&(+R$}F8%km+d; 
zb5DJ0T)E=zH8mv9Q-bW;qQ4lgA5&}j8&?20*jYyxD{X6^QD}!O6fvnH{}QZP3Ln?kkjtn_bx}>-#;)XDchI=*DSs@25Vh$G$hu z`)^RmsDp&GC0>01$W?<=QGj@j+66V(>qL>g< z*PdOye<|Ge58OQyW+2N0a_5q%7$(sb@3wIhb-4Y%(8x4gpiC>0Z*zhdWs>=GR8E!)+I*&NNLtG% z6)^+VuVCbDtkPmSJ~kaS|JX>P{&hJ@2YztAdk{7CAE4P#-INMyPX6g0y0dx)HTQEc zh4Q6vG(l4GXCfn0z3A0Gj897VFFn9d$|=iQHJN zl%qTodc~dvL|~^q14kmR_7X)@M@owHL6pRwgQ*9XYHSk0AWs?+^J&(7!HodFBy1HAVV>Fq3R8aFS&|gjxjF8zDV`Zom zImGk5v8V3?;vCM;cxJc17s{zKC&4L?eVd{O>h$U6*%h%Yq~|aOH-5nV!x88fk{R=l z()eJHY!9gJe>Kb`Ti`wVK+dK$z*BvG3st1*>8mLEwx+I84vCRl8@QD{%U`iE91zvox*I6jyiEAmX%iOam}+k6*`Lab#;=jZ@$r)A zBhN?+vfbvXJ_O5@&&G{>y;^PH?^VLf zUBc24J%_`}^T&(Zpy>^#`3pB{-Ko~*>ib%y!R0mZh;<|x#>z2K@H(KA(Y)ABuh8J} zM}KRV_1m;lM6g?bMus8gcm%$;17V^Q#~&Dn61G;CWJsrgQI;fcR_|i9k3bSq_AEUo zVS()mSPhAcWS!lF_)YHsEZRqQM~<8ka}!kGdqtmg+(k{><^22M^jFGC>lz-QSQCi% zL&S6^Ftyek!Fue)YE-xf4^2O9OqWaNyC77yNu4X~8OE-Rt zbdBp}Nlk2N*bj4u;p!)|Hxv*NH^KZ9Hxb>3o2%$WSi^fooyyZKHo0t@9efmBR%Go5 zIW#^})*CBw)0hG@`Ujq`?yDGx0Wzbr#M;Xa_UXi)^d%Cb_F>N@s-^h;9WN{lEhy1% zz!dCZ@ms|qba=l)adZ(!S?Lh< z)Ub55TTOIdNM_(#pF=k1?cn7go1ia~9Z~5KEl@Ae(}D}kb z3?q^V8{6})B4;MT;LvZKMK^TDGCFieE!TVXrggE(B|_HdEa2aYaLt44HDUA1N5t<= z=eDr#C?g7VjEl(eVuFUM*4|^EefeTYyk^M$wXr+8z}5P-bxV^C)+asg9wb&r(kqP7 z1+@(IsaZC5qWL^F0n*xLjB5)9;v-HBDpc`MfqfwN_j7pNNlsfVUY>8d!7Vdmr23Is zpe9qCd4JQ$lBXcc*FH0gkTAype<&z50mr=Y_*8fIe}N0WqQmL885S{mRg@#5F|keY zEJiCNb)`+C{Fa@&wNh)M0HAMU1%JICSr7L_AwJ{J8)tAwLD?C!%AJ!L%6j80(g96j zk3tMa9ZZqVR@m#NLk|pz>IiGc=kY7KtZsy zB%BXHf6d@2Jhgwd3ipe!K2d3(Fs0o8Cb#Tf1M?y3Kk10U+E8FpE{wd^)hu(X&g zh5KANfiE2x9%!UkT@~if_muiAOj{K}!WiLgeNC<4U{9~PX)2$(_Uf=rNa4QCh}UIU zDp+0mSM5b;Vr4>Rab5Gr@ul9~C4!D$?s_aD#5g!0WJgD#KOl9>P9>-~#;Bh1D`cBfjWc42V z6owXH;e#+P>lcpVzzjEr)}@l0>?x zT6t>{Us?@A43vRvN*p~$_Xc6DJD+bfYEa$|BnAq(vcL++j61sD2S%!Q6t=el3wQ|L`TMr zVJ@R=+doJRRui^ZyA@24VJCKIkWK|V%Viu9K$U6lcs9kt)u>3Q*EpH9DzP%l8BIJC zuEf7$tzSmg|6N92Lr7Sm!x0NR_0PJ^!>9+af4+LNG|NKo!p@)YqS8u?j_GJwaw$(k zUGGe+&RmPC1yW-tL@qZ=+o z3I>_b|L_+6en2Muql6qSS=xBmt)*jgaz<+wBjx{}tTdaU4Q%9DX0svl6ft1j#2mWW 
zTHK(D^-m#NT0=OTEQlPchNx(hXA^5kFl{BS$ohSpZtKL`;jy^-Q&%M1s+h!gP{5tS zMow*Hj8CYcN2uR_CWLzj$zy{;Sd3!lkBbztC4H<3P~MCHXyx!E?SS!!ip7W8Yxt_+ z5yZUIIgsU#zB6$RH0qikrm+1?xB|(v)Stm;-nST+R_>-*y`B6UfsxJf>DXF|rx>zw z7}+qK$Tv~rvfw6eMFWc$_PK9znkLFaIDfg2rOr$*B6C|X``xx>nu8c;pSh`RF7O)B{uyAWOI;kB|a~rJ!Hnlf`9UrH0fxBMdbecthXxsBfFHMh)TnRHS zirXH}2RERg6*NRx5@))U6>(`x=RD7rjPZn_$_T$nSr9d}xkKcdbGdN?uSn=9Gdq;Z^-(Veb7$)5|3jZ-co*m9}9;sWkUZ!SVjYKEq;WdDysVE7^X-ud4`2 zGXpwfLa>&{Q6Q^V%|8q(F+TCrtbeq^1&Tz0@b@!Eq=yd>aPkU|W!?mW zcEbkNbkFjK!lNY>L9~IhR<#pZil>U0O~^7rm64em?7K%d(c$j1MT~XhgPDbFjAhig zcBax2jwgAMF&D57P>oV&QjB|KfY+*70n%`P8HbfCsUoK5p~#eqQjiHUwldsPoj_6~ zl~qRtnqbJGyu^LK!}zhDY6aqpw%0Ev-ivS|*M~Vkn&CKEeiWYiy}&jZ4Q+tpPi7|T zI7aNg{6?`i9$WMp6B*9TC{|^HKW7quAg$NhnjxHZWp(?C8doJNXlU2@ZLw@F0;11X z=A}p)>FjdL&ppQrCL0r+*nKJu2|z^@*BAqflpjc|qny_!IR>OQgWZ|w_#=m_O%~lN z5w1T^3^Q#uN2-$oB?fpa(00@a5nYzJh$fLd$D7R8#ATyD|I)rf4LiFTkUJP7p7@S+XY@)8*kZky|!!{Gm89Zt!`n8{?1A89W*jCkcJ~L@0`%<`%`8n`3I9t=h z1#%UGCI%`JtfN=1RbF#CBphQ8#9|gX&c33}k*TXs^xx%|T4S-6n%*a$;8+z9;1bSh z(NZQXPY~1=7G`_9^|j?eh;D&)?&gg>n#8g78|Gx31ozp&hHh9mj$cD^tvWL9D9UO(5UvwUytPQ;C$6Zz!^_PsOtXwH*2|Z5j{9Tp_wW>DUr%!i>IG(Rji^67XYBHc2 zWnGs8-BT_erjhWOBa)eHMk8%8<%UgAcZmKrs|As&jhKkaAC%7%@Op| z7ZQi-X2sLPS^UW%48pc*6{$r={Mly|+I1*AEoSVyQvxsxyx4eW*hTN+Il|}>o^~HN zOU8*N{uIGg-Tpmq8nT?}Xqsbheclf4l^Tbz(T;=ese|JD(wNMZq8!l#)%gJqtmVqI z3E90#{H=I5AxjM1DeDh&E9+<$o(BV?}iXP9UB|_WNN=P&6^%QA7isjGE}A5I-q= zb0y;fFInX)1d?|V9;Bn32-ZmiG@|DS9=X}SG^;g`Svb3R)9dIzj=(lb#LYH&uTP|> zJN~RkiV{d&5R(IwzIKiB5hk`n8UN(WwUZ+3i#dOq%+MH;WHI^slp4*~28DU=CqaG2%|kWRxb_Kv`?rS&T+HdsjRk+PZGGpU0PfsqmIL{6hcu_=jP@ zq&SEFwIM=)qLdOWoE9QI!)6pr7h)8g-I&wah$ZALN}Q9lyR0_uKW{Tb`oZ#FSKwe$ zSd$r=O#{xqFMg|L78H<>%IXg2xZE>0;oNf@Sc{00z& z&4WF|kTn0QT8(Yxj)}&te+I=#32IP8`WlJdVU#?@gY3mvr&dHe+h*|VGg8UV8r{T> zV?X;XnD@&BX(5^Is=ng`f6CO%4Wk|xj3ew<73Eg(qSsz{;@@UiLT zY_SsRN`JEwUTAS09^m8OAhe9Ja8_k9{)>rObganNn%#(~3luf(Rm_sy9d}0>$0{3| zkN*28Yw7szts(IOvIAbhTPHc^{n7f$CnLTHY*dQ=Q}q;|FrIFtMpEw;iS|PA1Bo8) 
z`C0xxA1lIq8MG*V0t4M;XU~(X+RE~R%-OA{wd;^1%9d^OAC=7N9FWD_D= zUm@-)>=a|YI)nG6VZ|HXAc$0#Fg+12r3HS}1M-M%eh1=*2KNpUb>Qq`ob zaF`C-V{)&33*PePFT=_#k`T>#>J5!8T6144#jqx_D@P6h1LMrSInYqZG-6&SF?=zQ z=Hu5WFPS-Mx1@WuLN_TL|-I3EABQeVKyj%JgS|3O%JPH39M-$k_ zO<)ub8nPkA_pzW_o!}b5#`4>VpvzyEfpl&CySG#cix&NhdX-{-a}b6lg+b=zrqJWl zhw&PaRmPp(Q8M7LViyeG4g@2}S`0%DZ_dz{?JjW>CD!?PvwSn)&`KU#VGQC7TU~P2 z)j1qJUn`gzx6%W4B%NE+i7_9mQmpd2#2{rCJl9WH{}?0G+U8{u&AyvAfq}~7z>#0$ zzO&B$vHj(HsVmuPQ-PWw@fK(DIO}9JhIGqy5Qn4Epk2V4wZ#vbvV-+FsN8L8;zFCY zzltXOKW@aIUmhJNSNyD{@)tbL_n?AR@7hPH+STeGRp@Koa5#%5=m+1cm7KF3ghk$+ z?B_z!7j*2WOc#^ah$>*KYCoFdM`B0llx5QJr@I;p1Fz+MPZVY4B z!HrEY%}>6eD|8to$FMIpnb?M+-PX$LH3rUTS&^}k@F|5m@q)&$_sFt~ZM!Rj6GkKloV>LD#kE_$&F7;r;18{Yaf$uH@?o5a`YZ3)N`4M8c33 zZ9kw#1PIj%G@DPk+?CC#3-2KO928OGGMZl}jbmwMFV$({l{yZyluu@es9gAv-t*j79Kd)O z+ZxP4gGJPF@+IW)q#)H0PkT~@C*o+$w^Y5ox3*mv z=SSq5y&ZMk+Yi~iM2Bj$lsg0`!rAFErMx)dUm#~zQB7)-)wD5v)Z3+`7q&Wba2E~5 zuuNa!#plKu<%8KRGtovBFYICAE^IbEK!Y(qyvbgw{EQ0|sX+u9OZ@0k5t3WvB@4D0 z%@UF90wWqZ1-!0~-;v*D-?1Mez9+^>=cyFG( zaPhMY<0i28w>|DV(l?uNf>GtpRX%$`7rf+mi7Lvf)y5I>&O~XQqziemh~C(2cdVV6 zQl*rC{lGBGLYQ5wUzVJ=IBce+Rf(0E*~5uPC$kmGc4&MWq2#BG2wPR^dCRg96Dv2j zUo21YHbs$Vj+?1fA$t)%&o~wYG~wn!MA>lndXJGhxn#s#m$8{2|TRIqrB{%;z5zo_)QS7WpM|+9aZe7RDl+o^HL=FShUYGjiku zk-Y^n0x@x}>D@@h=9i&gJ?T-b=O1BTA)3`+^b2U?NdK zhb|>m4C3R$Nnho@YFQ6C1b$KPU;!6X2si04bnBUlAk$zPw*w@i3# zB=5L}P1O+oFp5YAcjUow3}-N`QH_hbWv#WP1nAKF&&f&&(2hPHn~dLzQ_-7DlzY;z zC+F2dU^2UGjND?n=`CqN~lY?Cy5t*`jUT$|qemuj-{YG_Fyha|)-w`Sl z0R`DkNGPhD#tf3fgRpsL+i*X$8#45j>iANRhzyPveU8H=?ws1DJRqUBA;h_@f zW(#YXp#{9k_KgkaY^Q7^2aGlWPAm+m9^Y8+w;6*_Z>KMz6T_s|!^bj#;=5j2n$Q2} z&-nCbe+JNA`037umB(UVA^I!N@rca0LPa7`gss1%Pi@H;wECV=&HcbYO+Yu)N?PQ6 zrjvU7&?1F2RvrcC@JX z>{K$xUKt*Z5og*Cg27kL`Zgg+yrZ_B+3waNDFn_8RPoZLh%b$)$v@TwzWdi6jK;BgjEMt#iJjat5dG+NgxWNmNd_G5)DJ= zAi;_6%V>m<{KnPk=F6#2#OckS#!1`lRbi31eQs^!x`fzjBjAl%3>VsF7~E9fe!8IE zs~iv?YNeFun%2p0{}p_sT=QsVIr$B;h2eW!j{oq8P{^tH%5!heKfFIm(&b+NB$s-m zob`5AWD`*dVQ`7`dml2krO$IH$l-61rOBnX;?UiIFM;Ei$LvZXOS)QF)IJn@&6KLN 
z%;IMw9s#Qp&ik9vUrZ2J*b076$}2ZnnVA8W3E^j`!Ah;OqAzU#G=o*70U$PhNDIT4 z%w*OS=TW{s6*N&DLStmg2;svwt{TE=EEa#xPW~bD-QN(J4qZW6uKoMB^4_^??t{!} zTYG~EpG(c;qi86zBQDq%0O^CH$skTYl)A_LrO<6&~mXTMn(In>3rUdW?B7p9(fP~m7xAoDKceo z-mRNmJE!&}4MN0|QcFgnQR=MfT`Jo@iuK+mV*X)O;wSI-U1p|1Sn;96GI!*9fzX%+ z%Qj2YA0frX!mG}u04w$t*McdynX|h?h~4YGv&!2_jPge-Pj_rVVo>*qGZX2wKnWy?{?7wlAdr+d@yT z?h%Je1Tq8Tq?rywu&^J8p7yaIg!kQKiQw8MS{6BsNok=>hQ%+HTaO(3BWB|1g9w!r z+P#=OZ^20F4Kn?FhBAMgKXDD0Lg#%u*miChj|USbV&dTz0_1*fdY8t-eK)$Nz0%hI zX%lgX{>vu9_8&G8v2TOr2J7J`{2{DJq z58pL2)L(0IjA*?ou4H!AG3sPx$iQh}UpL!?!>{d&vdCt3;LRGU#p$LfL0{z?)pCj8-uM+>l&_y- z->pP*{>f-86q>D1U9spX8CggAa;zk?Xmf21JE%1b(ntw?K3e^XINT7am9i%0Wtwzt zn^M0&c-<$#3M!t@^$TWVU zi`2jVoK56UJG@{5ncoZVb|3uuM58PuN4Kw$>Ia5pK7~Ucv*1*s>Kh*x31`fDnCcC+ znd-`SzSO@;`9wPlKPCO@$Hn0Qm`3W#L`hjCrD5}VU5Uy0Z*_el$8!s2X(suv$I*Ea z^xU&8O9&UTSop-6QTjEk{wQNWa$!~sd?E*(HXM4z$vM)Z)Hfe`Cu3n~@zbh00tyw_ zpDD^FO5V1%P1<7^FQQ-{w%@G5kI$->k7SE_1@`?a3{{a^sEL7qK7ThaKBd>2PFP>EAC)X1f_v&V2gex)6h%M??Gi&HTvj+Q5IvM4n!M z^0P%Vx#WM1>9}R|W!&z?{+Ka6!zzE~Q&!|he#@0vH@)rHq{ScX3R$X5e6E`17S1bA z?1gHMGGKS~Y0DV;Ju&siO{+8MdHJ3c-M?9amc8ouGFOqVUZH>GF`Ibp*U`{~pnaLE zuHcbcSNUAZo4pnWWbSI7pKGUfVx(J}$sx|Qn&`@YRWhr#Go}8r-0H%zZZF$1Rz-ZC zD-PjB6sGCf=a~xc;yRyD`z{tKZc!uO!?osjV8gMvhVnd5JgXA5>PQ9j&K@EZ8_)hy z7Z=sPqTUadkx+B!#MK;=*|plWNlR-97A2fYE1X;nS6H{}xv`l|SKh3q>PgFg_VW8Y zJsDkb1=p{kzo-i~f-Ldk7vsEAKSXsTlPPfiR)!1;Vdm)^iqtqAsx#1>g&HGmN!p&A z_r-A%h}wy(&HP1O(B~YoL8%Lr&^6nVvX~aWrl_Q$TSA(BMiI0y@p|8j+9b4Xr|(hW z2ZS$6hpbu*cEpbf8;R^SODi6~zlKy1FVx&oCFmU{MW@0wacwj$|0xc!d3MC!3BZfV zRaEK@#_!G_+i?y33tXPC^cvhCQt2As*Qb;oIey(Yiz^wwr=I};S3H;x za!JK+Fhq`oCY`=pAA@cBIUl^C_X5L!w&*oAUOyix-bri5MolPczetUU4bS>MM_{)1 z#ZIqJ)5@#v3OpU^dSrP+dL~BTVyMuKoHav%G(`H6=_x)czF8(P05wH@cv$5Ln=ty8 z)(*LF-Q9IQ9-zgb>hg*Yi|_JwTVE&L_Tv`}td#t!f%eha#zH5UI3TlVi!r4<&O)aq zdcz(Te~S30!cQITypYrn60?4>2a$qOvq=b=4XnCt*hm`xtw84$`ua zb!Kjj76R6=6JIq1DV;qzc)3+$&aa!D@>V)3|HT|)Oq%cb$g;3HN6ZV+y`Vd*CHMk= zjNe6`LJjsxY2pjFCHJz3eNFnmH3omlLwX#j>UN)SGUbeuEXEOimXfE!%aAKmT5X(W 
zl3NXdWgT&M8CHq^j<9JiqUwL`@IH zDT*uPTK_Ch5&M!V@upKuQRlw$>K_kU4K{vV#(tRCbwaU0YaM(H0*erupzRzI!kNlr z0B>Ggk#_-R%g1ds$7>N7iT=C5*orXlFQ^~4U7a>a`KewRP{3!%R{sA%-T_AN{~IZ^HA-Cud=>|3c?XaaEg-fA2J7!;|_*;FUmmwJ2X12 zch!*be9~Hxi{%PRbU>5%0I+B@$h+>`>zXiUbp^bpe>St}`YnJfc3B+Teu41Slxsd0 z6&@S_7fw5YU*iU(^WFQ2*3UrB{kV_&A}Pe(|0`F4H)#0|yaXNuD7N7jJl6{RBa8Fj zL5^Mgi@c>k3Kpx+Obn^}Cl!0yT4;5A69;Dc5j4qTAp_NBT6fa0Qp14}7|C6-{q7P1bK{r+bmWS`2pK%5z&KXSVSdHjg-Z;OGc zic8FpyjN(lIbWpoIglj;V)c}OlYmJWUvZ5{1H$eFzt{GJ`6Qmibo4xBWsf^G8Dh8v zh{g?|IvkE>CY}RZie*FYF0;H{s#JSupgHUad)a~D% z5h;8s(#v;J`p_hubZIm+l8h2V@GKb0HtajS^|50WI&n_upR~FJsg&dP*w0j(UxU&8 z-ax^MTCc%>gB4&@ZmKMw+x`JoY@&JY>$*T$3oy9U930z*Ak4lFa^d5{yiV8PB+~M= z$q>zjA$rfUlQy3d-bP9ZZ4oZMmn6CcZQl35)agk?HSytMIzsrzk1`Q|yp88*U(Fx$ z0})%Ceoo%weHkSmtQfcxwb??Hywxf$&CI{04ms<zb2<40CoqPfYP}=#D(P>S z;wJeEIM=)c^(4rOme%mrM6Mtl554r#KCrYPqkomaN`I>y@~RKRwK3=O1h21_-5n_P z*DtiOJ?Q(hiWZ(B%fe;+)jB6ak6AB4VGsDhDiMSsLk)~qI5ehc1z-d-c;?~m8nm|H z8FT`j$D2ZSCN%n>wxmDAMH)@DAhOAA(I-WSO7*rWL=Rd955O|4FWSr)`FWoRs^F|| zOn@8YKgbDi>8UVBT|J_-Fwpe^cga39ECx@dA%|7X&HlKanTQTw`%{lsJobkx%)~h< zzoI$;JCQT*zOxhU+zoy>%U567FWYg0`nj{q>y@SL~P6#zfDi4woi^re+GA_!XpDSejkU%ltrDT~0ce28$ew+pR7qQ>Pp-Qe)B2zq zRE8pj&$^bx^s8;MH9>#LTA0^4Y3ls>7G?qjka^Xhj`+)CbPPnZeq-i+{S^%F<%IXo zkcr;BTHC^{V)q%72($*Q#%T%|U2>Gayd(2?^UmHyC@E5oE-5+UM5Vu>(h(coLGDs| z@90r(&;&j+3W%Pb5k+{e4i*7~N0=sP2q9@H{Uc5Hk^Gvev3lD*g!vx8y78gWAAvwVPhmU1(&hlE(N zkJ6@UH*dAx_u-_!A$n`vm_zg0)e6dCJPnb;`SsC$ATYz|Cj>Z4CC76w9n6s_p5=^O z1K{v`Jxo|nFXmFWR%F_Dz`wMn*!|K%qgrb80ccA90DTsYX9=CQK{2;XsFrH7?ffC1 zFQr>odYZ|qR)yEy>P8I`nZOnis`1HlMIUc#>m#-ye^7L43aX-Pxtv4Ix!aO2&yttaN_( zEbvN>h(F%!6M_ttaLGN+bYc6^ih!Hk0}|E>B;J0pbh8DSWmG}U!q$@r*hBfM+ha!g z9p0DKc}6J(OYQUys!Qo{_3l!@l6({J41$`iwyZBVPhU(+;h4Nv4v~ z&MbTp>~Xr>Ke-EF$ob*5m7w)qBrwE1%hQkE$_?BkkO>f7b)#@Ooxm5emXZOL$nE|d)7STZl5fR%8jy`jB zA(3vTa@w__TJ8Lqc;{f7@!S2LNFe@5Xw={}pqI96@Yh%5CY*#$Au8qm4_c|6@lcb9 zS)the73)dcLUTeK`*_W>TmqeswrQ@pBUWo;#9{ZzyIx+5{XsNT->qb(3_rX_ON6z(0!vnacv~m}~ 
z0gw+YBJLMC8$u0KK6rkK2#ws&(w4u6^K}}(S9+yucPU6^iL_kfH>Q@K3~-C|BwG&V z8c-|HkUzwvadVFC#J^GXw?(8y>Q9HrIUda)4iN{aLj)SNC#kQ$f+ov1&%O@9Yrv9o zN58k?dfPK%*XJavH1J7+Go9S8?D^Cm4iWx_*ZZ+tQ_7KsO>@7SYyIto2K}Fj-!kj; z|KNV=Kgx?{OKDe>S>)CJe%#h9N!)iaNP4z&a{j6~8E8amLC=wIj);_q_M6^h4!;{f z!mf6-vYBfB=@1Fn+=e0-N@2D%{8Pc69jl+M*#5AHs9L{}YE@|-ID1QaY#*rs>R!TeW^I|;qkFs;wovzBZcITW=(!1?Ii(kkD*pPi?J^dNqxEBe+fiX zS6E-3eYMCr40=-UJk;!79idPdA+$qbAx_J+rl(|oiQF?u3Tej)xZfso45G}+Uk6?# zbk9i-?1n^B(nm&3i7j(B2cJTiz!K{qydtNVgJE)^xM}3Z-SFC-61AzEBqh(o8Mw(P z%Vf;-dPOCtA74QiO-RdQw4A`Z+sqI$rFvJSNr>~kzlHfvrv zl3Ebq3$XB#1F|je(JOjG?U(9rC#&kIznlpqHQ1tfiixO$8!3xejp1LU#fo1{uu-Hn z;*&4w>VI+;oY`_OIG1`mYf^~mD$ptqOwk-vzGLSuXU2Phnz6QlG`lYq%lk1QhG)hfAY%m<+>a< z^E2T(qb!R%-UH>EDQ+P_9t4Ta>d~+Hd%&B!+(VAq>Rp`)HIEHCiBO=t@2T`(4KHUu zYOJ~dPZNI!|HD_UwTynS2#HS)rcPq4-JiABZ6b`Nk~iUtsdzoJtkmOG>QJJf*!m-n ze%PKsn6c8$r-q>V0qXx?teH`I7WKm7n%rS&77dBfvRYg!$ZOBebzqv0^92rdT?@mp z=s6N?u!_!HqN6kvNpBhrt)4D*u(MI{toihgMn64Gie^CEes0Nf`cGpBVR!wcxLNbI zl^v|SSP~}kvt?XX;^OeybM2VtL%fgcGpFyDhJ|@2(lBf4xxM?!|P0F|)xG<$Z%(S4M41tM1+gN7gEq{MJdgO4@>aJ9EW4hxWjd%8lrb zhcn)Oe(vmuJ0o`SQNNz*VlOl&u8_k=6jE8&>|QGmzjptW%_}%?<%_+e@2mW$l*@x> z={N0wIHc-(E%Wi*7|C^@x(;2dI1a*eMo`Kse>)2+DgBE|>jE)!F z5o6{rQs+mHw!EPGkASC0nvf@_Y{o!*M#nkM?tSH-j!-NC zw7l@PY=RQF@3G#m?XoV`6BZOk{@|eJ*wC2K*GKdl!&gHvh22>dbFn9x$T?UxZ!d)_ z&qP=ve4Ks6#H?_SjPg~2e_MRM+4!e+s?Z>)TFOnP^2PEZ#h+o_j!xAsiSF0)(Jya7_ikvuzd7`lKNux0Y7O|pqAVxW(VhPMVmd3Y09=z zMwH1yoiEwLj^z8*)FKP}p7vXO8)Ev{_%nIQy%MB(YwN!~-KD z`gT&+j=u1%>NoGqoy%OP_5-E_^#v_)6qQa-#&7QVx)5ddBdt}z*^g)A8n0usIAP>Z zLme*}r){WbKQ%l^z_g4X;~|s2%|(uC^dLHYbuxAxiOMnHWMmOihfa)mWjoKR`@&R?l&tTPC=7=kOr7!1$IL&GRa{!Mw*opCSK}|+;c4AeoyL7OO5ePEnYWM(0yFdG(CUKz;%O=-&pYGpZ1M(RE9YsEEA zaQuu?u?$sxci}ZWJV|E9_Cfc-J#PpHrsF%-!$@GQoTm!(M?GVx1Z#15F^#96Rv5$2 zNLvXK))67EpI3z=&gYYBA)T@~0J{Dn%9LTTF$60`+g(*#hGV=>8sE28-F) z-57(Gp95SP@NCU7ATkJj>a{q1^9$I665=XF}O^ zxF)9yRSj#dzukK=duBN)k72p9k(DcZ*j`lU&1k-sP?KTi7_9!k=e9PFeC3G{%*xa# zK5>90VZNAxa6(jna5fC50y`0xbbr?3mNZ9Gx-$Y#Li&PKSB#v-0mDw*OZ~$}dmJlF 
zqt89OgNcacTCl!j`m$3dE~8ei<`9XbNCK$!EN)+z>3Ac7VZiPa+UG`5slX>Gqd}*y zi25EGyH32KCcUBt;*Ias153t5q{DB2gvOE&KK)GfG}A_Qqp{M$FFI)A7N3Van^fFk4N_+(UFXL!SOGP7qM3j z(V%o~9WRk%fvKwiqZoVel-21msg5W$J>8C@wPwJ8r?)7fFAx8TTblCOPKmVZg9c`y z$@@`&JN#V7RfKP0goq@QEn00jpqvI3i&Cqr1jH^vf3tr-K6b@!~43Fy^zP8zJ0A?u#qZs%C9H5XR&1qi)qq=3l(+F zY{wawIQAg%Ct+J+O+Z*D5g`K(p3Fdg6t{djf^|u>>8}bkPiL+vQCPOg50mx|+bp6$ zzoFReXnvmcO@#$}39ps#smY2y(TZ8f=4Cq^vX0UMPx430HR5t)<2kk91 zXILgA%4O`R5nCaxDKffOau#T|VS@r@nNMb&#NzmeJr~}&$B$Xu8@(aQWe5anFAj_I$gLNz)O z34#bE4|g147;V$_cfR|TZTN5WuadOUUeazC;kP=~e`~IM-FlAtS{y?PS^k2s+soXQ z^Mkq7#)tWcWy8*~BiuHkXw!X~G)=a`x}BiTIS50|tKYbu^djCD zi=w1;jt_;NN6A|LleN|Lh!K2Od}D%owsnid?1+cu2bY%&h_pTf#72b@602gZpK33i zClpzp>ir?ti2b}RZ@pDy_GA*`au&SgF=Wow{^-$m07hrbuL1Mt4zA2;Fs_$?{ z5vND7yLv^ojhOnh7gz4WK6i0_|COiKdU@KSqci|UfYg%Pf~{n|+LKu61s&TTf{oz6 z1RH$+7Hl{*jOSECuFdh9)Z*|-?wV`(v4>FX{bMc8La6Yjc%zIk^`6sR+Hoe;OD#71 zC`rY~%aG}&_ZDJ9^eUgQ7Lw8bOS9pN|1ZtPY?V%-nV2r57c&sC1I3T_GZZl2#P%)| zHVOV+UuXuSA$?X1`9(g(j|JfmK@hi97{xC1DB4N?Mk&u0MhgEjp)_BkR$r&|Aj2dY z1Wae(X1K46An$a!Rq(WrkXs7$$9Ij`xF;@!VG3Y^J;Dsf4o2{P1gRu?jieo>z&nBnbh)kV zV^F~q0u^f58!=J0{SJFIr`ha;ANAvKy5zA{x#8jV`+6Cf#QTu;0lQb$Tcg42-k?=n z4*^48u3buMc+iMyZs2Gmz}^Q=&_T!BW|WcCa(RWDj(Y-Tgg(pS=Dwxor0!~^2U9x> zSq(i5hj_vYt^RL(VJrU5v$=w4tb_=6TYjVK^DQV@jG!dg?bj`4t%ZerIvRxjr7O^Etfq}`1xd*sXfi3BYn7~ijXo5lh{xGmJ=O2&-Q-Z z?=#%bMg0_-J1CyUPsN416#cnEIxp$-L-p?30;yh=a_+sEaTH`orX>%j{LF0-APYA? 
z%JyfE&YiE?JDh+T%iMjHx7G@K`AB)<*WJW5ya;~$@U5)TLUg_Qkpq-k;4S}yT3E1v zk5X=SfKm(A`-syzM=in38}s2z-fBk47MfzZqk?xfOw@DP?5{qlnt3R^#@tW;*(@9{ z*KFy>Tckhn-5!o}dFo01aqM_SSup)r@>={I;7FQ~f_9DBwJb#PAGgka*_`2JB7RxC z%aS8ITiDguB{19PNgKh21OP%_sa4$`$l&n(xX{}X3iRZD+KmFT&bsZFR)RE-_g_&Sn>Zx;1f0n zJc23Vch)8d9HFK$YbAo9gKL5LjjDrOe}nSw7MeKS`P?#fgb{n4Vt-Q^gwT?XjmGsfkXXF}wwU79zyvb>G`Vp#>h{ zH~na(mG7CW88c~Ov2&sppRA%C`dEv{u^UgcB%UjE_9{e$qHHowE6&1%Ws>k}H`wb0 z+*ObH<$x|3eIm|DFQaa0*7+QLqR^8s54tvgLkq@e*!r50Qi+VWd|f;=O%cwbTaSeMRck zA+k=)h`{kUgl7B@Mdjxddjo`caT%j*_u~*YDm=49zmpYz+sDNEzLfMCE=@-=QVl_W z0w?!zh2#ZIZsNAwkeEl7L(G_Z$RYTDv zG0*4dr)o0p{hN_B2=jVv)C->XeT)XO0V-5 z%z`Gl1p#)`WtyHx0f%cH)7j2aWWPcr&8OGr1q9twA<`#3SW#nK{*>o&) zd5uZZ^&fs#ua0B^9@oo22q)p>Ql{ojg{2rdW>%JJDpOrgr&N-ypODI~*dN>}K|RLZ zEBL?^VeuRQ3x_lVtJ)QQ;;-4;@`VSdF3m2l$Ptt!{Hc*g&vypdA7FE_JuDS+;fq0Y zM5v;`y!{-kPRBk;T%|^(6wxS}_)p4#g~{rKy2)n_a?e-J?Nx zhG$eKk8tS{h~Mc>#$muYYxaO`^mt>YG3JoqJpXshg`ra|xJ9IkF9rO?%|v2(I~FLM zg3wo`7SP}c<%|w9{kIAbudqf;79-s2SmPg25CaW`wXJIxsNpG7ihj$V*t*qGCtSa* z&uWFRU(!e%hgwS>J(m`(cTr>EAa#=I(GX@^z*Sfd1 zhx&+Ih9*f^b+{3u8@0ka|ZUrAg}r`r6?^t_bWcdiN&KZkLI-vym=<}l2;X9DF&?IOh9XfR(8CMOOQ)r z(T%!r*1C!HbaMLH8WKT@B2|b93vmtom7Iq_W(GcC)lxBnnihAyDc6kD5^L@i!kfo` zDk$28@;wuP_JsIJ(|I7+WkVNi{uumrRTHK07aaP|Q(Rc~>T_)smrRk7UTyPm>j>O>CYFNIGiRyk7N&}PV8=Y+#f5)lZ5AzUY89mHBQnFpV?hAV zSsM&6Jh}VL!S;P#+9)ynV4}F(??EYu2{K8e@xL04fNr98o9^_u8yD`_7W7)bExreOaYTS@(aFiKu1nO_&rYuy5lQKgF^iNrrgnMEw4qIg z`zgAIPlI|5O=SYJSi5_9t5^D_F9^~wBn?u(G#FHqFxazC1|-=r?NWqcW5PY(`*ZTC zJ$)HMgZEQ18JAKQLGX+~+*azdNvz|fl6ab}QsD6%awnqia z+~c4)|Fn@!)4`(oCiD!6bNC`ngT;(NwtO5dPV0KRL8^V$ON~v%r|((5QFwOdRoqLC zuTynR5}lvU515}=)1RGpr=BxMeT*vy6_GCKZD=$J&ec_?Bcb&;<6AX|ubQy=5JC^u zyjSiQCYq>>2{95#%2f=+x=;Oo;vK|qYTvve$tBx#mKCc9x{02*&ZBQKu52gCXMTnq z?H?N%m|_yIV%J?Aa1v-swBw`1trxi zoVHzbi^(?=I^4NG8c89atujZjTMMyNMpVQxQ8u2Y_nxWbb!EM8T&4d=~%?K<%s<8`nE-R+U!Egytytm#Y72&SU}pb2|2++ zmmWudE3r+YJr#LjtV!#1k@hb<;p$iwLYs-pX`HDAJ5W;HR)Phk6BK&9vI=(atzC0G 
zsfmYF8W1>T3Aj1Gg*nDXn02au(!_8NLqTIoNQ06I$$v{E22#%k>lO$4*LG*Wss@~} zrZ5(Tv zr>aqc+dQry5oxX5*fLf+r9%1-b7Z<~>#_IC17h1vm_&B@!t^zXgy41{ z`>BKYyk_cV;ZCRt4ZGqf5{1k389~Fb_D%DkPxI7aJUY$x{D%tMKBAaT!&OY&uKUEY z@_HhgM)mNFny?_9Iu?<>5KhaPGwgQj32W*8bR49zqTOk7u?XzE5y)?IOL|hz(oN<# z7OFlb*1{|=Lj1B#>-A{X_A4=nsN`GKC)1acz%33v)bGMa!%Jjmf@0a z1!-V)#S|L*H1EpwiH~hW;$z`3{s79G44tv@ev*;uwwXnZV~%h&e}|it#I=r>^DU{*WtNjz*F?B)!Qld2;TcN{BVI9lj#o{6;`dEU?vd^74Ne9!fOla$A7l$~KFB!f zhLuaVyUBD^l~3?O!7JaPRmDL3;ON9v6omLyeOy!VChIwaXzX`Kup3L!S3^L*fXhc# zGJz8uvZhvtlT0i9qei02US7~Zyd`R%ScEp8yS*BFNT`Vlv-F9(^g?izTROEp=v z`=gNIMe2cvEQlnmPVpwUiomOry>p;y{tfbH93s1JOr!B>3oE+fsLYibF(sT8l~A>n7h(_VFYZ4Xgx=A- z?6>f=!tgsoPy!*;q;VYuK|-lEe;2<${?}4OltFwgnWp3(GJmydYVbgN333CREe#BbAiYkLd;Pd&a!t zu4(p8DHJBo(~0*|{W39mbt(0Ju4#MGqVYoZrX<4)usc35PA&-k8$z(NZcN(tZl$Rx z+|lVlr~01y)=X>fCfG{cmJ z+KpjnqKi=PRtcIZ()+o%{N1Pl)4tI0cYp%LVMsCS)P1M7x85r8A6f_h z8ISs5t|^G=>XW_0w5bmoMU$+a$5rE{061tk>MCO}YiWuWG6B2+{_k10=dT2b(v_C0 zb4Lp%n>l7x5K1R6U&?HbS;;ywAAgdr>tp@6f)=mj+Oc+6^`+`~B*0drWxIL9LDRZS z2u5PG>>i`wJJjc}8*!6*YBY0JE-+B`apYvLCQ($th#0AWU~!v^V-19LOUv8MCrACR zyBI=iPe09>)t%X=)I019%E@^jXP!^!_`Eu|#DVE_(JLG4>{@>`CGPvT*}-N{h<5qF z6iKIV6Ec9V=qj1AJ-uS{^1qe%6QjcT3X?b1&%`26$#uZGWhY;BvSBt^zAdwk7an@r zrg#Dw<9OcY`XjEK5INX1vgZKs2H{^$r;7<|NnTVk4`YC$8B+-Td~r1f5IP{$$oioG zvx9JWNJ9o~o-mbfu>r-<&@wOH(QSL^t6c+-9_|3l!Fc7G`BvX8U=F%ac6yO#jIM9* z;*Ip=yw6}NL;oq!4nf9**r-L;2+(euUIthiw+>e>=vsOY&1sGOUHaT>0YnvcDj;!X3WTr8sC{=?z{yd26b$X#>$z_SBhsmQ1K9WMqPLknE}w&()3j5WP93P#!kf=XLA|vf zsPR3=bi0EVavK0p;g33g4UitCdRJ2c)EtpL&`kMa_peK62L3lF);>-VxlLhndyDM9 zoZ-Jr=RZrQ5aYimxN(~e67dDUZwt(CL2dOt+if!2?a{|yM?mgyd)WQ!5E`G`_@S`z zqpjesjewu%25M`#>;8QQzU22UX#B2TVb8I64)hLz_rrneGmCzci+bRQ4B4E1UV+GP zo$fEtB>#&FXerA4SIQVRDe>AymO((sKCv>LaP38pwi>jWRmRl-^x7BKW`fu#C(OS83;Of1?Nvdcxys()0h1eO?;lW6d8}kGl=;)nkpv~e)KUy zkOtr@e2KeE%FiOn%66C|j!&U{#W#=bM-3Z*qj7% zt|5JK8pizT4dX-!(BSij`|)nSt27|Ry4xL^$?u&{$5extA%R*uE_4bmb@UB+rwobE zTi*||WZHJb9v9WW2f)w9kotR!pq}ZWsq)}Y0Q~6V`Y4n#f41ZSPX!uZy|3v4ztcgl 
z0#+T#1be*rSN+V7;Pfl&w2&r%J&un^RBCQysqc9wrXPs)me{s*$CnCfj5EYOLLTw- zU-S$k8IZsIS^L}P_fNTs_dpFX@|gM$HN*&jbgb7OpGOm40VM%Vig^#L-}jV)2{cOD zu?elYM3BgeJI$-G2SFFZ85&H2KJBy%d=YM2w1 z28&kx$SC&|aK(8S-3(V5My}6>1CEBT`r>BT zE#nx_FfYh{pOeYe4kYY-rs9jgLaAvP`NitlCTErvJxO)3h}5jo83;8eyjJZJu*Zbx zcZ#8$8CRCVm%Uxe{h{=L@984!)m7$!$Z?W;;pkRGlqdTYDw-k z{`3az1BEt5Ug*l1T=x&T?DKJOj7R?)} zpE=YQv3gn9&7lym=NV8Y{o*q~74Ule9_`-5vcLRz?BFTrvjT;*h}OQq$r%tSUerg) zof2>KlU~(<9sIT87Y_X*7hED#$#mn;c($qN=}++F@a6!F!^T66M{_0Oy9`cAy@k2O z4**Qh3ElQSo_cBw6tsM8566zR1}FOy8hY#W{m{otXP-JewAg?6u6?tX-uuZ5vcTuk zqx(Ce=swEWBek0VW(t>pe{6)1`J~V2q4U_@u|s{EoMwCq@ihnLat@)Wqnvro@Nei9 zdit<_Dv=EaU#cq6Kt)Z#Kx#B}uIz=UeEKqtI|ft9f#Xq={$+*MQ!NP0{c^#>^79jC zbVvRfy-g%7_K?uUK5VmwXkVUm zupozU>5i7+GGTyv&n#AS72c*M-)j(Nl_iOtm>W&sP{e{^}CTXi}NX@mtzbq3S|J5>4 z)EY@od}{NkHit@_TYBY@D22V|Yw&01SPX3j87U6yep? z2JHRWGbthZ;}8!Fop(svQ6dJTzwc2O%-27I$J@eRmWgG~!Z6|&eQ5uOufGn9s%zNy zaYVYiyQEt}1nCax?i{)sB&53=6iMkCQo5y)ZibW+Pz0&(9`5^op7;H|pZoXk90$yv zz4xrWX07YIP8z~l%!Sw=dz!9_H0en&qZA&CVny!RL2>%sl2k4e^ERDt_dDuQ?f|h0 zo=_ww`pK(>R*ZZ3%)g`);%9%ZoF1>9hzC?A-R^{J!r;S46Qlk|=+V@-Z_uGx^ujr^ zzi@dOiz`Y#JKP5KPG}Nz;xWn_iI>DsLa7IDYL^SI_jIiANL0}#$NQ|~D<@d~d?CT#xLGL2;oY|z zkAdGwzps_W#itfrJU-=SrG6hScd681oz@S(s=Andmw}KZVtAA8Ol9Y1048=@;%RoM zyob&-YwkvCuv%MeWz1{^B|1~H7Vf<4=X7hF^WE`u&FnC_mes6Dnblu)#Id*l)(NQj z3r|>c%UY^2`rV#&pjC-6ilisRA|v51?*yjeU*3s9z&k?STVrzy`{@OVZlTW;|M1$SNVuGC3F>($#cbsbj1OG=@=W3NAFr`>iqsWE9xHki zDvdn4VD9^67wyiqZ(7Oi2T~li#>*ad0RFHRwAm)`0@nEo(Bx1;of9U$@h-*mH?{x< z5w00*_oeNi_{|0Fn;AZpB4HiWe0?XzEY-@jaHyKCH^IA~_IgWZ2g(QiW1VO)Zg>{p zGsu%~$|5AEz)iXa{yH|!_w^Z~86?*3NDnhd(%)b_QqihqI;bk}S<8~XXmc-n7pG{F zqKe1p7dDC&)nV~?9~i*4(%#8RwWOCC-dJc%ps#TzxEEZKmZ zj^_EvO&~ug;yJ;C&b$_163`cNdXaR@y46%X1{NaiWAjU0R4SC8pk4hO`4Bj#9iaJY zOD1dMf^CCF`U}y8J==AxgGCKsn^S1)>>S(%ELULWou;q}oFw*pL-XN{z_M?Ry*Iy> zf3a+hC8Lg-7=R@ip5Pz!~= zj#e-ExqE6Zv5%jnPSDQ*<>)e26|Fl1t`%NLDl4JdekC@Gke=QQX=!TiOOiiW=137O zvjKoY&YwO=-i7GdgPo~SY;y_z{0Wkd-p`WM++F*AW`>v?Zl%Pr;^-D8U3{&?SYs4C 
zbPnnUye=058i?s^t)6q&1a?AMsCMZU^>}JHVk?*O#2?(Ion8{dqX%n{H{F?*g1jb`kWVyC-^Fn=l$%t09O|m=1exN<(>e5G$$yk<$oailc z6!WuH4=M5(3}$`N5kl-+UOAsRY$jXc*rsI@k=5s#bY$SE6sMP*K>A!BiDQV5o)RUZ zSyuu|9!cNH=8W4m&63&dL1~IhoF|%M4?hJltiw-%m4Wwi6qRSzzAj5jb+$c(sOnXF z+Ry3PZwEllE^{HTRU9^8sE}1eu0+G`6AEbapv@BAN&Qy5!$^t|;ShXm2K_x++^F@< z2Dj0afF5sS5UFCg<#zfC`g)V!(YBX^2?fm`88ZpiZkx2AaU+}U4FFroQoUM24)l13x~K%W72MgCNjBM0*oXR&C^I;&)co|g^;)-9s zBWo+1h&sdV?TKktlSCAWcjZh}31ih|n^+Zis4(!(&JNrBt6F!BKFJ%ZNBeTDZ9wU9`SjgY>gB zoK1MMXAZ{n=$NkNxvLl*IGNK)uh0vuU#)+=-bN_!baRyNb47p8o*_sYV=p?uCjv<;}lBz`#?GNg*SQOC2 zy2G~sttV>e<;!C#0}5}`%t8tunyIT_xpLfee6J)H#aQ*?KQ7bIDjbTEz{5)ZO=B!x z2nr`Lt-Z0(V(g*6@d?$opk|`+t!MbYzp!bOH89CuJdp5~J$YO_rNy^CClQ=+wOjve z7RIW`!Y{x(t`2#h|8|{^SG=1Xm?SwodJewjgq>k?LgH$xL{GiK zs(j9c$~HuSU&sEO;GSuyV#%!Ovte2=bY43|0q?wQev|qxSG+W_rS2`$V}>&q5!rmd zJS0wB)MAA*_pJ$j30Ah+ve>G~aE-oiDO^?-OfR1#h4>4{KEUQj%Pz{BQ_?d>_FFqd z`*mpjU@?q}UOeL+Z zSUh?&CIyig)&y2HwX|gZRMJ+fjXtwPuE-Bq=dt1b`|vfUDAz2(4q-y!wia#W-g#65Pq?1d=gxG+UWtz@ zIr_@hJvTM9Q?p-nmwnNbwSa5Ak`9YF)~R9NpYxZ)0^4uGU*ab@Mjf7>WefJl0`Ou- zTyfkOC!bA;w<6-L|1NH5T~C>R618=v@mkrI_njWtIybk~E`FG}sFE7)q3C~f?P z@M1r5sMr|DxpN!dJr%EVKG5sINW73e?CLUbwM@RaHG?UZ4O*oIOG7Y24n{*Tm^K8P0 zAmr14oSD2VnLK9D9d6O;JKFAK>yNJhp@mZzglDXs4yDNIMM6p}1x+|Yg%SK{A!_}r%w>%8JPH5P5fHdfka??S)Eg$3LBEvCh-X;#y=EsjC)4YPL)ds#+F~LB@WUr~`9dIAk(ls&^R0`i)|_#Pnsq4?pP|MG_~w{3p3%ZmyDp z=n*F1!I?3|yX;$Py6q=QLQqWRWshAh={pZs{f${%gD#EiEdzCc)7PTsJO@8)h7;@> zo%qAOL}#d6)jr%2amZgoC6!G&I7Gc@G$aH}gWlAn>^~ZB zgFki^lX-uu=GU)KnZ+|eX0Z~jPzlH^czG35lG3m~n^=c_4V7iJ{A0F_>P?1k384!-#6nO){eM_=8pQae9_k>+3o$0%c6L!So_F>ZFiZu(~hoS zvUcpdxOd8~C#6auK!_!AUY5Z-nHHWbD)4n%d2luxpk8PqV0}D(oz`zsyCzaJY(N}( z)6TVwvP<|vW$1EdH`R*1>A~s*o0D*vC}qI?Z5X<9>~KX-hY+r&z8~Q!LK$Og-wR5i z4<)H%FeFjq&qjz{ky5n@3er22G(#WA2AZgl)*y>a+IMk;(Ya0Sz2rcrr*3{@K z3z9jh)LV&PPKKokUy)Gk54tOte*A=0q@dL-S~}cQ#jRzH417&8e{;5+&vZNBzY%%2 zVpb)VPRB(zY4C0Z4u72hFk2A7lE~P6`o{7Z_dWi=;im@fnv6Gvn9Qbh{|O#Jy%tQ3 zKj(Ik7zBlskUXhV02SHFd!DhXgt}(MUu?DldM4Qm*8}=?ZvIt4kxSqEI%wRM(s4p< 
zRNhM)9gCWboDzYw9>^6xyaOZK47hMldGa&VJLD`eUB@AEtJ;rFG1TUb&E$J%d>yW}^7Qra<1@NS{3xNb&@NCH z!ZPrSAS+299fGu5FbS_+L4|`Ny<#bkk)}ux^tOoS%!F!4I4W^w#ZyZVf!BhmQvrj_ zas(=~AXxJk!dWDcqdnpc)OXD+>t|%%{v<9DEB&I7)n_rFbVuFTg--)^vU7Z-Xv|PUXGRV)_GeZ`uIjx zdD*h*2#AnMnnXezjCdb$HK6Sn6#=3JzS^{@rTl4 zwgH<~=3IfuLmuFgM}BrNRdrP+4ldShkQ{m6*m99_t*~yw!FMPija)h`^a~sHZiuOs zyyn-B)a2rQqT%Q`u)~+eO22Z(5QCPV+c(#qbmm9Yy;3cocB*sfE88qQ7bI{nmSQ;Au1>{@WTpXJRXj6EV%W_Nf!Ry*bMBw|Z)SL6%NX6SfvfTiT}F&cV}s8qa|ds|3!74=CQOhhrFyyl z&+)YUSb*>e`IXZOZv^)Ip5w>jTwFYx0FqC3D`1~~Q|_zzAEQiWtm8jDZ6Xx(J$=d- zuW&@7Qa~k3^a?i5Y5TGdm&B~REZNYY0*mAA`Oq=?O|G?&-e6g}e-svPb7{2KCl(2~ ziIi!^CfO~d@#|m{^?*iwZv{3*`LF#e2w?H#7{629IPKKE9FKz;m_{f`w7Qs2WH~Zp z&eKrqP#pCF(RUx(G``iJKsp+p73oDQ4MZvS;6(1mg0am8cf2NYBnWRA#7W>@8ub>KxOF&^^ut9!~ zYM0ceM|oShjJF4qEUl;^DH5L&72grY^$J%eXi|H)o$c{(rZGXR*x;=6NjH~8F!MQaq+J2Li1iPZH2>5HCM zi#D%|vE^6h+;Zr77sx1;i zeGcnx4Yy0#1Wm@S?~038=(NbkL*gESera8a!ce#NQ+1LJnJ;682@R-x+kYYcBF+9k zLW^T5sLuHkRT3%u=jutLG`-^p~;3-r`qpn7NkefCk)awJbHp? zWD}g=y$-mDj~yO?shLtj3~M+&Sj!>%DUR*Z%zX0mFiQ~dl>pa5@z{+JOKR-CNB7$i zNdgj+{{kY*x~XUpq3ZSGZ$Etz*>JQ&?xuyujKOM^L=H_|C1`gb${WJhyXF(`6}`LG zAIMNTQN7usCX$*!a*SHzi^Z*{{pFbZt0BxJ|5}3VI*m#>X|g02$gA}2SWI)y;=hn# zxTp=yWa!|VKcRxG3QRdueAE|;WN%F%LhVmYN4TWG;m>!N)JhT>+rVP`*2~{2VPEl? 
z#U%!YZf)VPQWV(p2>Z51(MzAc!HX-G<1PF&nkaB-7PGWX%HKXt?%ny$YOkv)k*gw4 zPX(jWe5fEcxGW*9Zp{4rN^VJ)fjC==hmtn9X+AH;{Z^}J77$ppaDNh}P4U5G;$+!9 zGS?^#TLHJ8d{fJL{))`jQ_e=Sht;%bQL%$!(NkwZp|1oIJ7bTk`_NCiy0gc6q?PnU zg5*XVy`1PAzbL(Zboc`$L7)`sJx!|y$kPFZd2(6yz)<@G}^^j@BN!x_gsu3Edcvel9F-WUW{kaUS!=u_Si8)Zt7mENG)w0vf zckK*KKBZebWiGp>8QlYlfd=P!krp51v`Z9A3Uv`Okv&l>0Scr!Y~V#MZ})mlLttLf zz~Zx4G_a>@UvOGGt(e~IODzPV`tQUATo>1Mw}?Zw5n3z=D93~hTN>nC*cJJ!dWn}1 z(tW9wGQ7&U+Og9vVv^0NFqB7BF_>M&X2?&S5pm~heRpKfq7$cn(~SRomz!MbgJ{l6 zMTdgkfVr?+@Ap1|TGj8+fE#j>x@gchZJ8@F+T=3d{${1#uK36J#Az*^wM4aPzd3)h zi484Ty^VnM7Th$a_^6W&iK7=h+MmxVAnrfwF0Mt&KTL*mzQb-?a*?;BkIAct`jqo1 z#4Tl!s#wor0Q|f07o-fVY;k^Kud**d$u1U%@mfTp=PZ0}>0DS}td0Qg2*UeCQp!Ov zJ$ZAz>T?>h8YLc2GV5+!T-zkTlbNHq#M2&A%~N>(K2BiDR>I4=@;>eHiF-?Tl{PNQ zLjftOBXaxTHpICSwbrk+&`=eVwiNpXPN#!R79^z)Q9@N~*-eM?AQ+uisxr<15CZFf zsClA3(US4NY~ifaTs1-1fW$6`R;sSiMecobs*TLD7!?KoE#nJAxoT-}uPON84tF&%ROoyF5b z#^HH-uNeN-VqL0MW_O99<2IkMN9!T6W@H^L$#Fa`86}GQkv|;#%LH-OIahm#;5pD? zfYCX{EA*tZ@XIDsvK1n=^8TU^)mivG=`0Y8@e{cReVLXhAPYyXGQNvq*IoiA+$+Y8 zg;%F7~%wgA@P3m*rEoa6uU!~C$cVA zxf#H^WG?zTU(l7n*#6a-n&-)+-#a-aL++DfvyuebhIL-deI^++fHw-U(_;2K+yQBu z15yiDH5;OW-(Mo9WMl{XXl;*N}2d zUveP@{WB1j-k5xrm3!4$xa^r`M!ZTb`3jEQ6P;25^oDB~HWl%VR>&(zr_ev+Be zDb{|0c@B-@M+XnuEsxUsG#o~IpI|pS^Zel(X(%4j01t=Jf9i=J5lVXdT%Vz>|6TFD zP{-&K9%2y>Bf?J0)!q#h6mIinsI01D_9s#g>u;&a>YsYrqXma{`qv$IH!d}!xUGKq z5yruas&9q#Gb?ZKXg_H#im~7v5%U;u2?#I7!LyoSKz$Zz|35y9saXsWtlMlCm_`d* zq5-v>h9+3|2Tp-KYjtm9A((@-moS9&`b5OKisI`I4F!?4Rx7=c*nS?P z@L75$%y->MU#(_~7XDX6hK_C?=-{{ELxDqdf}*YXD%%<&ZXlCPJLE||3B^|n6!0r` z&Tl90lJhY3JQTER%~A?~VYy(+`ZMPUGahr>h!%sR46+oJXD0rN;_t-grUU#c@qK23 z2%;66;{fN8(+^Q26ET?C=s_l*Y%ivgPZslt2nKK)+*qdKAvlZVi*^PjR+F*OL_DWk znD4wNU_n#NJP|?p;UBH#M?!VDJ|IGR6@;c!iO6P$s{PpK6#`gEp0=yVGCqAYeGs`) zZJbK`aTOYR?ui}`w7W}TxI#GJP3>MV)G%U!mcC$>F!fLLq=pY&o+gpXTwYxOKjA*L zt@3?vvCnn!18srSB6AX)wNbp^`u!xScwsM@iHO)8@c51Pq(%5J;qBUJs_0YZ8o}c! 
zbB&k$wP-TAm2*zMo7Ecr%j5kSrLZ-6{db)H6X1o-}D zNKk|9*6GdJUG8jaXF%M$4I47AApvNNpK!KBvbWMyBu+AdPl4N0{d80uP+h$DS_D)V z)$hwgw*H7&?{RS&|P5S~dJ%<+xFp(N(f@Vy=UNOT#lvrJ?3{n|Y}n0IFwALLrz z*cadYU(Lnkcg6)SY57t=?<5$M`}Uo*0GYd2vlylt+^7X4)6^deK8|xQ;7CgAQl%LzD(rBAR-#;qx_}*J(T-vl7iY1Bu31jiIH^+f zWo(ahkyyb@W>lxRJQarEs3z@}M(-BCXMSnw>qz`0AiB`i#ys`rU3Dg_L8h&Txvg`DN@t8LROZo`SMA67|?Cl)pR} z4g7o>?WxpmF&0n_hFgt@`Zkg&l`J}saTT1n&8#>B+E<)Ce6xG`^wYHO;#2kzC!ld_ zU+^l4uh~bYchnk#>Wqf==YrKVYCFvl!^bKrTU0^{GA({7^m(&{X{p$cm_=EvEc8|Q<@6rpRG zGUtM7q+S?#aiSU0-POkSVWE(gZ$+^~5SySP#WlGBruFQO_{#w#3V*7rmyqFcB<@Ib z(`!p(I7aV10e|PK&Arq3Xg6MgmuYDpnjQB(j@WDE72sS5vGOFmh|m^zwh8yw;1(6* zp|=iN0O>%DQq&ifIUM9GS#1Z824F3rD3{HYX$ES)aOwl>7cIn-;X$6dCmy z4drM3!$6n13Q)*|QLNfMMa)~V?dppQ%#}eGm_Q%nP$M@KC#2p2;uFxCc$2*U=x%9^ ziDLYjk#kXXB^=OaVr%NFhQ}o1MyGXa0qUtAG&RcxM_J=yZ{twh19irGt3g@J9a z(l$P>!6BVQt;}Mx1?unLM=u;)kT5YS`s0t~DMORPEMQoP`TWQD^WF&*`2S_Z0G{EF zR(PB}Wfp6{a2-O@oYg{jM_-^l0V(Ov9ZnCdK;;@1W8Ody1wQE)J8Zin+o71Vg0FR# z4lPGc@P-QTq63L+v<4&|z`fQKKk?hjedu;sSNrh+ean=6t0s3%6X24M|M zC)YS{&p#~6Z1TVpjG`AffciWFWKOq-*_0FtBE95KnN8hlSFGEu(^ABO)+$}zj)O!ym5OAvPLcfs=PV@6YLzxWFiBluHM4MV@}Iz}IYf2b^_b1<+TK-Rg;vJXDZ#DoD z2g;$gpT~7Na5cO;HvcPEdhQ}^HD#Mm_!wTJ^Q$u9qZOZKK|E?%$>TJ9}80wSO z%HfzR!PmLG?4HCTluVC=HnA~mMu6Jw4=w{QqBzCCWQZ!G$xSAaPiS}`Hkr|6x}?r) z4Q0eC(x`nPPX!8g(gA&Ntb^&N<(6*`4O}Xc*5E%04LW(u#c=7~B~(VK*LYnuj49af zn-caCM4y0_l$2{mHByro$3Ph}nIuRip>NQ1&6KKTeh8ucnPEK&lU7EbYGH91`Dgu6*Tr40 zUla(*hWIz$7KV@?#WPI&SW#b`wImyRug&Ebmxj^EYMcCk2sM(+@=$k%4o=3NH~6j} zUTe^b-^BZ$NG)ZXG+uI{K}rV#~56q;{=@3Qdf{3p}- zNCoq~^b48g^WpC}X~?^J3tgoAWKTOjWW_FUD~~`dPjhk5Q(lB7v<JtL&Ls)SDy( zvQ)8=7(KJ@s)>ICy�~*UzjCz4N62KbZjkIx6}k*ptKu?$S%|5JcL*T`xwVLq{)6Sl4*OH(lFC@ z_r7eAVPW=oHOM^ECCo6tsfxT{&$D$ww?}s+>3%-Ye#owCt#U@$2QE5U<%U{hS4fA~ ze-!haX``obOWh+- z9p0dF#s{QhDg`H&hf47v?f6^u-UX9lbiQ|2jwoC7-vS=2mSHB$83B7*m?E}-2v_KA z?)1)eW2>Ip|4H0O#C7Lm4%^$aBXaUPx>It+8RSOfHQG;#1CS09P7Bq-2)T^lD9aAg zR5i0Q4Qy$`?loQfMC15NBXvdQJvtgd3xXq4wY&-lG*$~$w^AYjctMV=DT%M(FRbN* 
zS3@f5)oQ~@j#?-G-e-p-_6L{!Xw?0cTMg;KL!=p-T7}z}b9FO+h3fx>43Uu9EK{O> z1<8y(tGFe%9lFwNnFlwYpH(a}m;Re1V*&>b+=6h&3NqHz`#+!hAFiOA0?H9k^i&r9 z%R7s2wm?|~h=}-H=l_EhdHSnGS|~O_o2jYsZ*T^g7ZiC|l_^`+{@i?s+_+QG;;Cf{$TYBV(M@ec@xYgz+Jb*iX z;k>+gh%&fv3-U&Kt`~SEeBoTM)d{%i*=iC~6pH6A{`KsV0LY0wx~}EFf=4Maz==^l zUDW*gZ-;4O2b}-bDk8DtEwKyNr=AcQ{<0!{mCxDEh8 zH^5zS32-ehD6GOtrIt7X&tsmr6=1sk^@V@$i`NHYm~DVp#Nz@1TTAssCuH!F4a;1Hf=xsQf8X$tJjkz7tfw z0Yx=@enCkU<10(!D`<{))`IvF%Dh0;6V3i#oBdDs10AI&5Jdh=CppclR5QU$hHu zvI@$fGL7*;w7r*P0P*wgevrRY_1&A3SrK$oqVu?u1d^dkM+J=r;ppMQ9en3y?W5FO zCUhFLyOpl{6}ljf`x%ZkE$$@&NC?RW0G$AV>AN9n)p`ohAQYe2G+dnC1)dEmk3Ur= z^Q++ofr;=AaLPCUVFrIfZ6{v_P>6*%JOg2p=v(T6p1J#scQ+Zu_|E*VVtc*5`oZz$ z#^gyo0$|Ap&9UC>Ad5{PIl5sC{6>2ONQXWQRvOfDQEE=9O?R#^9qs(<{ukL!L%J^q zN*z5La=rkHzc`ma6G2%*)Fb|ZuV(}ed>ED9hwMb7)1&h948?g0K}rWiyCcs1HL zfP2v1e3>)v2XJ0dA*!V1NpF6;z4`si`0fXyP* zM_07-w_n%7V$;${dDoR)m$a1wZRr-UXOHz?l!25)LfM*$!e%3> zTTjcuEU=2-@~qi*vo5<^*4H~Iu?5sNpYpkqP!w5MTmjaN9cXx`9p@APjf{@;7QWN8 z12v|nu63-Bo0tzDhOy@t_Si@qr*)7ExOsB}?wa4-HUp{O2P7*`_|-U!Pe0Z!DEQDP z**!^K=cN(YB_V6T)grN+Rjw5&=q7}9qrj5#m1w!*N8p&sYLwrW`~z8|#7yb%tuZ zh;wvgy#lNOUfQlAYKxf^GZ=*^f&A+az}Jqj>GMBwjZ3HxBqH!2qG*!4G`OcjwwD{v z)>(nbEYr{4P+4ID=*w8a(bVEX$D>!_%8DcYXhF;*4$vkWRclZ_n{R%b0c;wRX zpXFGoK4=V3Jv_W2!Bax+forXVWz*4}DC{i#cP(hq%nQ(XxV9~V8gy@<mi4}8wZs>>V?+Z+!7 zW|u6xjb{wQMdyc*1bvC1WasS~e9_Ik)BXGpK5IOx(#|8pGbj~R#d#)Q`=Abi(v6j+ zSph;i85hPs3kh9f-vN@MEDW#L0#;vh<1|9<+q?%(wwbkCqidJXyt;0e&KbIJ77pU# z@yegYud0s(=L_$?S+Xncr)eNeCyklA2^`#~=9f!)t3sBnhEB|z_G z9U^1D$x>YRA{-Djx4_eu=Rb^Z+>LAd4`@q(nM7+Py}vl|6|0E!sPm2Mhsbk6?g<|GLM+{l#a5akC*|BU4~n~ zyB_bM{rHJo1$bQw2HQA>Tjv7GBN`>ObKIM+=x(7Dqr7YZ>>5{Bdipmpc}IPliR=z5 zGg3h!b`pwb-%~%D<~2!n0_fM!73J;X5JFd;lnLUiF;~xFU@*3?iy1$xFg&pph&68| zxdQ_3N7Y_mD!Ks&%0bO-TTLGUYxZ6xQOM9IieHS@QekXla0p;M>))KwEC9qpbEJ&~ z1pObrh*ze9J976fx?G8Z4Xy#mZ3x+Vq|%zDYUqNd*h9*jp1xJ-b*osFC%9z94lKkd#3qRCmI+R2D2-Y%TWR)=PO12e+UM4x zR?m0vZBz#hqXPERccLZy$VSOHJ+oTrWbDl4_}GX~r4!AQRwGuGP^d)Ky=;6lo6VNf 
zm29L8J6YOa{)Xj)ltR7T(G=S+J7=;5zctT~^sS(3`xD3KTq2MXO)oCNA@tvPSM29Z zKO$cT0z;;;4B1o@qQV81=NtIH{29GauR-5NumUV#OE0E5rs@TRSjDFvHGarbaU=Aw zfsFIzyxU(tkV_J{PW~^2#<2c>6&kNl@I0|vGXpclMUEOo6f%XRIT2~r9&fwed?sYU zp&44A6oAA|^_Qe&b5t^K(|@8IoQ5G;2J}3_y@lT4FT#)xJdl#0?yx=#nwfat|CdI? zZtwEGmmZzzVS! zR6cP`wH$SkWWXzcBtp7@fWWlW2<;f6`V@fONiBHG{_f`MP$Rc)y3V}-eKY!}oYtRJ zal~kZ=$Nn`qf@mHG;c=VOpfXpSz|8(a2EY+p1agTs)QdQEq)lJ$oodm1?@Z=7zIuJ3u z{e6hv3#c^08CUY#S%QbOlWVjYf`r*o)s+3dv7$FH3bp;3j3%O|a$K>iGsoiUcNd9J z){VH#>~p$WQlzWE5W8ej@sZa$`2mhKh%t2m7|;xOE~E0^$>;;uPTd%ZbgbZE$6Wh_ zM0awzVYoXfBwD8ioGxVc~}$ls`C5Wb=}X(p)m;LL<*Zcp08vkU^Nrh22z2J9&lQRJc;fWV@As8uc3M#I)Ni^dVtdo^1Ci7VxuxIpiN?hue2VBib(7vgFan5!+2k}7W^oI|Hw28y27ELjou^}~v3IBScH>DwCUd6Z zn`i-7|EiVS;cO*N8bRa#t)PDo>)e1C?-&MfdqbojWJ^k`1D)rxhW>{>qYpN@N(!Xi zHk0;5jgVPO|K6Yr zXM(l$1KM5ylxlw7gEfnT(9Kpp_?3@=G|Q4VBgmpQ)Q71UBH0A%)G^w_Cchpz$9m4H z%Ka`8EILCd1X{H*d9r8FcAUgQwQ^JGRDQ49Xt-d7)VWRXcRAj*inDH?jMJ9jJGvd| zZ@gmj6;gL)QPevy*T04SR&nNQ#%{XT!z4tnf$#*ZaCPI#^!dJ@bJLvHi)FlZ@#$)!I8uR( zoXK08ocYe+z6F@ZM7UnzyeE)gL~+`-kNUa?`prZqdG~}|8U9vRWi^N(XtjP;E}+*iqZweijMWyd#5B zB?kL2upxo;kikB897z)$B|kzcmYsf1GG-V7iAPCXBqfMPKXrNBmJgQUyt1xDNd7S)_W}dOQ$-hA%B|!_1hmVpL3*vaNltyuxIpprN_E~}1 z3;;p==(#Z8J-bRxwNeu~x^VPgMUe~d1qIV&1;?7(mO5lZK@f-Ig<%1(rb=JKtO2*t zb2K+OL!hQ8iJ0GE-5#yFcYgj0H@WjA0KLOMfI^$UoH+OpMAE52*v>uhQy8V+!@G?n zwXso;m_v{=Ghxfbq#H%*GZYPd;KNX(1AeZ8c4TKa*dB8i<-*j?gaVIqN5Mb zun}RX;xlTbVM9GkNd&8SxwVdxE_jqOpR?}du}OgDVMp|~sN9e~XqrEafaR)7< zoEtFYcP_&JjEQ#(lMSOW+kOLj_|bBXJzI->;7#5W$^^G$ZC%AQG3UE8x)!*G{3($> zMJnh!T@Yp}Q<20TCj}S3HIcb`DO?;CrBLIMCJJ)zQ$IG4P64t^84t_x zCs^QrOdEeZ5u|uEM~A?^M4gmd@!C ziocb0+ERqpOuT@KHY~i0vyDIESaA26ts;U9Ba5luMC9_7^mIthFoG+@Ef$S=UlQC~ zFc}@zJU*q=c`I)@p!syA!A8)1`v`K_QUwMlC@eG z={ZL4fuWnHxls14zl4tdG27N$KgF~5QFhEx1mJGhg>^DktBxAk=(otd=u;FVWh8;N zf%%M1IWII7)7vM0Vm>LFsx840B#Vfuf(N6KF2b=zC*&DAfw1cG2kGhs21nB(bw!e~ zcfY4QvDA9O>F{rxyr=$Tgni89#UvOrLT?WKZn1gp`M0pX5-3lOXk4a-yWsVy4-_wE^Tn5z%DHZ;PF_r-s<1Gf;U39(Q 
zW_0^&=^~%NY^AB#_D&7ks4ysvL2%LHo$xb=tewxgPV&;@|&NE4|)`<-iyeNIP1~y++k>2o2_A+d% zrUc=dk9wrbXkj6bBHswrv)CDqkAO}6S09zwBwxk67nAHOlI9vzpj=yk86*Fo|9#;P zfQ)PN=zJ)8)`ZO)1^^in)4N)1`Q+5@d(Sc8I>GoEIv~CrkWD57Z9>DQiKMf(hBSLo zg|VdmYc^iBL3ujt_WK!5U7Dga=J=Zm<+Xuv0Yq#O-dn!r*0^TLxZ06dla#QdCJ80J z8addv+ghJQ72EA3a_l)fj229&k4D<0FK%LUXgy5lOPn$|yaoOgoH}`VELCFcf!!hH zKO?c;oFfyNq7k5;pp2xc%Ga>6Kb|~z?Pa-KxWBGR{~EvM@q$n?8`YuMJ2k5s#Z_%F z3gPlpQN!WHu!Zykppz-^KMn%+p7ZHlBnyg?h*d%Lr+0rOI@hSX^_h>oh%ovXDYt(5 z_s6BI0E&&eHzVA#nP~Y|nxh_Mm_s9iz+gLgU=dspUhfvW*7_C&WB3=%=v*3!Z+;hZcy+-oX7(I&58P6M;Zket1?C~bn z-%DBD0-_CGsAyvze=pSk3Aga|^$7f+T=YLo8(+!cvJ`?K2qLmKT>zUsI>#LO{!Dbm zk_J9a(F{N?R*q@6$bQp_%R7}i5lyv#}mzE7wyoTrFA?FWxh zwHB2WmDTC{25Cw(5tfB;dp~M1s1gnjldIEle-4jt0ZY#%mJ&H2Ca7y8`|h~-Tl=qg z2=c*B@elW+8G^{+X|V6*z6<16n*%cB3A=dF-mx?)a@B}B^S%iKq?BN+q%T#ARGpvV zo6TURXcU}Y%u(Fm(xUiBY9twbuc@w1V~;n#@iWSPk=Y-#Jz=y$+ZC;%4fD0_LpXhDa+obmtJlP8qWJhwN`e(GkbiEpmJQ>ll zoLha#*WP}iq^nd+Y`9LOizsqK=z1>BwbehEQ@zRCcwvB*iU2ZxJ_h9EcW*N?9axlJ z3r;TAsgtme`b-Y$m1WVukBTa za`hTsYp?g4geg2CPd26?7^b@@y0NI)$nSIm;3~?XdkO#7uF*bzL!1G&t%o&6=DH?T z5WBVIQ=E%6%t(J6D~TPph++Yy8##>Gg~fG{o^a1H10z&kSv|ef2x6%PEM~%sAx(QB z>R+$lLx-|ym!T6^|FVNNS8$0YAzK+*(kv^B|E5$^2q6>FPg!rbA0u{uoV=v zuSU8*gr1_pe?6^!U5fk5r({-cPJodO?R)6YCgIOdcQK)Jm^)a)q;W`ZB(utGVWX6i$PAUmt zH%hiLbhJF8B(eh$Fmg(625)a(I9)gmKNdfx(Sd;NXuxW1M%RKFDe%TSp9?pZzOaUFng73ad zUyVwf;SLA2zKL`;8}p(ytLKMnBh~o5l#MW~_9h$i>*syFqTw1wpT<%R*}`e@rbbJp zsiS&<=_;3&+|G^?m2+%O*uKQf>}m~HYk&yveZEg7h0|xUG~I4s-{DVMbx%hj%UFZ~fhp_@%)U~}z4r}G>G1D8*zw+-V z3S0O}>piakgyQhRq5Li65vI?H4ZSkDfe#8fL{Ft{-AjyWrsKPzr&jv+Pi>#k*U;5>_tM#ez#x3$&Dw)Du- z9QaoA*Ggw3@vIJfoU$V$<5pf(t<5c~ape6DUQf`(;I{jRE#1#K(ik&ayq_|AI)G)A z>Rd0Bd}k?`gL#>U)(P$@#wEEC7%ofy!x0QPrC=DV5D1Y441geJWN2;aib8LRO~szy z#3-=H3y4F%Gt11_Oi;Mub4yC?J@v@5$)*u4$>h@UI#qw`+Cn8Ok6mAcFuyhDd>?wG zS5KO199^)VHTprX@sLqo@rK` z{W;y}3~x9C)@}SJYER!xECIb$*Xw}Cdw$vO?5kirum$!}pF=)inBlwHGQ_a=Ec6}Q ztIqnR>Pk}~LUZ(^Rj!tL-1fY3u!3`D3D$3e+TYEW)%6cd5h 
z;LLg5q%+6Uf$9Dk^^&Ib|Hs~21!T1?Y}*pj-Q6i2(k)#|mvnbYNlABiBPk)>Af19B zAYIbkE%nXmT6^vFzVBZ9>_7XDJbC1aIp>&Tj&Wc2r5q_Tqc~I%m?co;Z&l@#!XoPm znmG(}AY`en_CeNgvvapWmqEt?mof2+uG4vVk_*%+b#u&(ZST1Jq`ju}>5IADm`&%* z@89cIV50DcMxi${R$k8>iN8(np>>Xx%jlpI0MQF3DyD=H#Q1JBB3P1BnQg>1$&(!( z8vK+X!gA_B*z-Nz6U>FP!#M~Qvi9&0nf~51BcW6cL^nBAi6LH`yJC~GjD4k~fKIdz zm(++-7~Rrr%*4Ytz^wC#t@TAvwP)dosI20VsMVhAt5se3YaVG}MZ|S(VZg$ckb@A2 zbZRO>A)A}eKn1+x(HS&YepO^REal}j*|ji==g)&UVWi(3;i$&5AG^-jV7c(@6i4%sPfcV%g21DTofDYTEzZ3^VRWuZ3XHUQ{Mg% z%v3*6Fupba;|C!>RLDN9F(rdIAENT?%ci#tQkJu~-B&qyjLIIQy`H!IGtKA)0#3l^F|t)O*<$@v8ufT5DoR*qgdC9wG}yzt6^3VNbGmjqXW( z6{!0LLqrg3XK=0%$yp$0@f^1wZAj--SCcldm6-6A2ur0#Um2RK6wRtLdNXl$^^x;n?Qskayri682DxG09%P0NhJg-PB`JA09OG~?CoM?(RBDj);%+y1ed^UKD0jjX57-5b3oIv zAgu5lNQ5+$5a>H8BvM82BP?Fz@H^Z#K=MmwRdyr08O!y>S`~-*0?EF@cZl^N7yMuW7kEC0;RIGI37!OoFAL_U+(TDNB?gxb-@< zb9S8Bi1G#1yf!zedEf9vf95JP%cLX2uF))`|LNeUo6{LULfJRCCHuNStyN>=xtCbd zy+p(8&y{P#92%%o&YnMvrDQ2B(XgU>&Ya!cds@~dTRfDQbFH}G76oCdDu(Bw>eRexV)jjkM;aJNvg^W1bOY$qE-v9+ILwdJyEi5KV z`bv5{{63N5RD>z-FDU?_KtPSz4__1^&8EEjxDjpIXDlBpVhLTAw;C;wO!68DnZ>0l zf^_@SKv1w{v*C54N$Np$`}Z?tXU+isCp6riL>U1@9K&j74|7J>)Bx`il#2HPaL@<( zlGP^9GML=~Izqwzy^7DPPFPYZxDY62JOGb?_otGZoU2~3!8YT`Lq(zC;FtXId$9X} zbnKXDRakdf&@;Dygx0-C+4nKv!p!hox&@(-Vp5#U#Lh6qsY^TUYVTp*y&y;)Z!6XGW$&-Ti%IvXzC~;O>g#Pv z+3sFNaW?UrErAIS9qkDd!e%9RG#O0s6j(VFS7Q>(D#qx{H$IZNa+C%$EDhaVj*GQH zT=)0~GHET7wg(~TdV@LbRO zoK1e>jAyl24fCe2zH?o#UFWtnAwluSU`ef_a#Ds94wARu=V^Olr-9@zC<7hG?z+l~ zwUxc8jJv})iky?ZjpLwUE+-)Y*(K}0!y(VxwrHmmYxAYf3 z*6y9xog6a5qb_q(Hv@m(%gMPjZyHLcLtnCf^Fep^>S7*uv#XD@u<|fFCI|^K~X{leLw)HPQN(k(Ad~54noMDG}afuQ2H)@RdN`BaeKJNoNormK8uHSG>^TnYvm|Fjs_V6$LhNTXg z+CnN4E;TLIzF#tinFawY`4pRbgXZX>6QXCGKX}&Q`%FnL{rezQfJ3)R9iZ)4IP@%NkRxVkv_ute-lq z50}UAnWX0m1s(}t6Vh005Iuh@IdBiH5f9c1-YhnMH{1T3k)ugqE|jZBL8+fg@nyH! 
zOHv6dsDr?)M0kDr77IRkN!PzCIYj@hk+t zZK??0NHZtp4zJRUEB{blV_HLPxv!9c^)u0s5F*i@NpA8>Hejic75j^9IH;}X-@pLL zhMwLO&Ff^4Y?%FvY_I^yhJvDz=33gG*WtSQNoIZoBZP>Jn_~r`o1P7Ib3nm#1_0!p zG5&-_AEG3V=*PyV@#10evT@T>l8Xxb5@lt1UNLB9kt%?nXd^Q(oekvA98JQ52dd}I zzvjR02_RlXB|f4(RzAOa-9z7@r7w-;Bj?i0Iyk0dmgoedC1$B{%0NjNQGePkc1!Ua zX*j0uw6JA<*at|tKV%(tfUE;57m#(l1!NtqVIb3RA3$BjJD&2@HB?A*kQvsiQk9^?DrZ+Ua`b5)sw?IWhl9Cxn!(o*E9Tr;TD zCix(`7yT z58raKGQ^rbZ6ra+)jN|U99#eeuMRv2r@yHNy1It>|LZ9T3FwSQ9Xyz`=zne={uYu? zVaQ{-y84Rog7e$zz@Jb<5v?0a2b-EPxdSvhk6`jTCEO#9Lh0!|WZhRMXmmPRA^!kn zg?Tt2vmW)cv{wpHc&t+L(D7SN?BdV8xfA)OcMtFkkZ4IB)1|49 zvO89p8xlF{Ktp!T={OePRd0EkDE((&M=cyt1HHICal&b}MF+>idDndXv{rUkZ&LX` zrO2asAX{%!kI;`zURM6oYY_ZIfT%eT&iPaS@W20Y2uB0r?DPZ&dshEMEdZO)7XNI? zKnDZ!5&^oppwQw!M&j9nu#o@JMnNwIe7s_dssKAx z2&DglL#%^1MD+20!y$}89D?n?;1JO);L&aWZ#YC0h(q-MHyolJ#363}8xFxG`BQ*! z`x6puSSMh+=*V%I)&hwIHK!DLkRmn-D$Y9qv+~|RiuE?Rv27F7M}V<* z{nKy_fk3+_FbWIA*mK?E` zleieK4KVvF4D{PC-Sl%$2hVZt#|5~6f&PT>_8TB@%^|eTF^!nn{?G|4l$-op)Btqu zSAuQ{*(N~U>09BPRVD|YSvw{Kmho4vhDGUbv8O;JUg*{oNmD2b7r^i=v8}|UW;+tmN*tziAu;~9wgiZ0E-kD)x@Ps6a2~4&ZK@Oz zlNR?|E=SHK^neBLP`Foc6bIub<5?p@cyt6L(0g^`RhIrg#__zEfylSPO-lPkVR6}5 z_744a5R2C;D2vbo9@=bcDFx&*g50&c!IYQYju>uwKTBWaq(h>?dm%v7ad!h0L08CV zJ^s`*|Al}B8331wqY}z{3l{DNHs_b88$maAJRCj&auXq#W1uO2^Cz8p4K}pTwVUs? 
z&~@``8h3yX$XX3R%xwf{0DWH&fB|_1SXIQc-^vc76%aqMd67i&CZTk8J>uE;9!L?m z6dpnv_X3&SPk>0_E53}_`w5`5Khj))(wM>lz!D(-;2K^(0F~vS#RKfc_Zo2Xl1i!e z^y&f*;Yo<_TPp9zi1wX`mPSvZ=$Rm5!}}ro45Ft>b7z&xH4J40h|w$FEcdL zPGj$vk011k3M0chfje_9*yOKkaxE?0SM&Sf_mrRn;p&v@j2Z~iKOFl$aY?3nhjd*- z;x>ukt_Y%R)mwm)J9bxFSIAQkF45mpPO?oOkMvYz{mk*vU^$w@UoMS;B`g%>mzB)Y#(z&-~=ZL z{u96<0ILHSPe3Md8vu4JiVW2{05&Bn#`Y%=I{#`6B-6S!onnI*&Q<{4H;%hVA)GV7 zcnL>yRm=d*?_*R}S535g4S**J*DN|A^VA&n!U0L3<-YlLARp`Kbtq7i#aD}rh%ZM7 znm0fn37ZY6VuBIk^-swY;0mXG{|N9t4?Q6A;E$f$2v8cn&a;87w-c=~kI#rpyi*+N+B2(I!=2?_MxShni;*41c0=h zl=s7JN#~ybfj|%rV|d;4kQU9?8=rzHRv`II2z3bBY9<={TmeEw+TTU!$IoQeUa$Q| zpR&(46|+~40>?$-Q_~Yc5%K&YzspLHeF1<<2kpa?#T#pgU;6c-a0Y)X5zg9EGSvZIwv#!}D>Z)948P7yJcd4(7W5PzRPI+E3S z|3_7cuq|g>Z=F6sa)Fk$hJ^Xm+Or!VpG>g+}++>j+#0T^|{gfykj_OabY5n{M%j&?D!s#kV}xR zkndG>c~szL)lDzov>uXy){W*pT%T23V9f9l+rZ4Z5zhmdkgsD8i(_FQv1`47WW5F& z2tiQJfsldM!GKRWYg!mVkM=bPy6kSz`1~*RhWqD9wQkP*PuK0&UnU__j1moE_?GAI z0G8`P`L}%Iw1}Bq!Do`&J3+dXSlAb04~;vP2dl@*0~i_QHo#--(}n9L>fjl(6!S4q z;cbDlSB&rdx^0+B=Z+G1p1z2ul{*+7ZzXxyk&;@z;A9h#XDywc1W2roUm!SCapF!i zX5GUIw@%o;O4traXjC$q=_a+C+mwQTpPfM^C|ks|rE~4?fLdx|vf7w{#7ISw(@-gK zc;OQ$;gcl&+aw$Scp*1HfeW}i2SAZ^8*DJF*t)t-a?P`=gbTGfRq5lh3Zh+B;{9IA z_d?swLgo!P1$L6|Q%+P1x~a1u-qgN^T{+km@PkKZm@K4Aty?*bL&LH;DJqEXMeETj zMm;X_ShN_f$GinkT7#v3a<^InTl$wV<>90rHko1BkcoqQ!0*m>C%XY?oXmW)TG_rm zFBf|3AG^@kZg{khe|iv>?orNQAFbyVz5^9cy5zS!Jkx3IkU^bAa0kl_y|TB!n)~p` zg&UyQuznDJ1c*@f1h2xSBGH1#R|O38L-hz3l*ft*pJ&(=gbDM*eQ!d1-}DJToeMV# zd|WvzWW->^pTUN z93iqazYb&}2$;eMVdo_qR^=rPtkws5{F2TOYqe_bv9+Tje_^~!B~++=euV#rWkX3- z;|!r8j{Q%~1^`5~s=LH!H8Iyw%}}4GeqOQzmksLnm}$0~RC<{n4 z?+1VmkSrptz+W{e-$)hbO}cU1*d!aF zh^`nkVrEp(I|Xb!9kfwuAE2D+6rsOoEFGAEG$u_pAlV2L>+U&KZ){P^ZmP~7?V8mQ z=F@~O@_lJQ#j@7uLC2qx%o+>N@7TFXRT1piM&~&{#hwiiIV~@WVkCxC=+i!- zwS4KJ(Z5vF$L@CnnFx7ufQgvIA7BGI3FjlvPV+48!_Uz*R-}z~SK0htv( z(SG5T>b)pgr?!GOrQAVDRP>MRKdc%^^vqMs^}Z>37Z&8K{Wr_MV@hsCFLiwUBDZf8;3a zSx}d%=&NelWU{y1rw8bWnBos=M5IF(av&T*auypB`|j}0RQ541Pz=(pAMRkzY`Bz| 
zi`e7HIY(~qdGuSm#5WSp#PZ2pL*?lhH@FOKdT5N6<0FR`KJH(%B)=|WM+oO5Xee3p`<@;nJ3^#T&< zv?S}53aUDlu9Gh7BBbbbWo}A6eLVm@oBOIbfnnb|qLMCpd)t$@0T zD0Oaqpf}mmx`Ex-gxwF+=%HKY;wy;BQVPrsD!>rpFEHxjz3%LRK=fs_rpy=w3P{{; zrCpOTfFn7ReL0XUD7#CeM4izv`Syh^Wp$i8v>9QJAw0nCrg(t%wZ$vCoa!dP`B;iz zOM_V}J_{B3R@ZpP?+>xY%g+BG*4V3jv|8`;3E*dEJC=x=*#g+VY{-!t)jt#e1RjT`fU!slz$!iDOyD}`y>BP#X4 zt0dM*E;liVJnd_%d&k(|d=cfcTS5d~J@hKonV*_0rj<7jlp^m8&o?X6FN)+_P5^y} zKCUBz9GW(gQtVl_%_74B2|($tWQ&P$nt_{O4Hb9d=Vl8}PtYqC)6>2s9$3NZj!}6Q zxEBm|Q~$PQ;AZ#KeCMXf{{Sov4U2DLkKD_cMhPN>ArJjfm_ayXb=G7q;@7Zxiy@hR zbySHF`3`p|q`QdJdJr@k_LjwQB@j<3I_IWspDK;WnP~XLrfbNPV4WQ?Ri}O*;pZHY zukAC#k*Fr!G=~1I%eeUyka(_4(E8%tR}>78k7!%VvdflFnB$d^S<(^OSiPAqGQqtI z5WaybL|2%*w0#0hH`;fyv$TYDejcGa$4dXQWz=D|4!1uxp~ru| zWr@HZLh6;r`KsJ6MYe1HdgltHBwXv=VgO2lKo3eWmDUAVMV2&sJJV1oHSCaSXMvfD zvM8Q4{#?iwjdUB5EM5!2KCH4VRosx}=TqCp;8L>3kM07r7l4ilU?dEGGZI^gRn3cO zlt9_Gt5t1IA`b^8V}x_!tAtwshM!lJt2sfMRfFrms73uie{Sl3*=eAg3bXExRTx*T zU751@1=;ySP+KdamL*XrmEtPd(l0=HqXErvI_*fl{OX7agiWb;8qckDc|V{x}0aVDG>ph=Ggzq(6Ig<2!FLi(qYy=5eE~g&2igtmzit zP`t=9iP;G_-+V^$A4cuaV>sU$^)Su6|7KxStRNt94F)_PVtzE&bk241ATv~bk;!r^ z6&$xr%ZR?L3k;uwhny|>_M$kaWn%CTD+ZJHUVKmzQxLR0EV%XIxhDj4GN#c7rW3fL zF{8GzhD!Y@#RyQ5ws{v^6@W%Z{K{oNJFFPW5=;8PqSO%fqXAFE=yydG(?L#&q)u;! 
zR3h961EzFO_JG+CD3_))!C% zh295Uot811Dv!6RJ6IUpIDFkt)NhdhUkt<#_k1+8IW8nxEX^SLz@kJ~w3&~m`^n4n zqGmK7s*oL#L}~4$0V75V#Rm&6g@8=)$~XTqV%+u=^jTq-bsRR;7C(CB`n2nM=3}AlOY$gjM5ok+48B8ufld5%nYv(_`4rrjZFHPUe-hb zQWaG69*e;lBv=m8TVzB23VQ<&z)5KzifbrqL)tVAu4=HJt=Uv>+NEAFz!fpw?u1w} z-ba$5K}vDq6;DaUaQYq$zQ7E8U~Lx5*Q(L6dus*{yZh&*<77X^Ji5^4v7&}?>vh> z$wnh;INqN7JxSyN4jc?twj2D#dm<;gkaZ8Hbc{O}wNQAw{1O=EztkAATdv}lE8o)Z z!{cdVN2HLHonrGSiZ;^KJ8luU!X$Fk`Dn~cYY1yX()maA3cb^wp=~PA&yCb5W!9xk z8Ci^XSIr?SuQO7cr=e@Ev2JC#q?t^-gb|KZ8&l1CXt&q!`J+I>GT#tVW*^fqChc*z zfo6h!!iWyO0ks0_Brt#wHuy#uT_UwHx3_knp79r(VHsaS> zmQQv^2y82BAMl6zOJQgd{Os&)L-V1l3C+~tJmX9D1rWR9P*~WgsZa`OO_G_N$Bf_d z4>-q9!a19wkA~@y<$-UW^P>wq zhfHRFK=7E!r9*H$J2VdoH?nOY0|AVi-vEZq%=eL(3$#>i)Y3r;fF(8TZeJHLVwm>+ zWyJV0JJ-1H^380Hv*uq$489L!*-f=j#)jg?_7c)vP=&jhw+k>QMDSb5)}nV@-2jNO zTC#y2aCp@tvGo;iWF=}`ovwq&gD-nVS5da-s3ifB;@flI^Wu)8fEPqHJ)ym*TTAJm zYW6I1K@Njn9DNevWuXbocWFzzV1KNbA62aJ@9PrK(5Yq+}Q*T7A5}jm+v}~ZKdF5yDCVLe{suZG*+lE@=7Y6Js!h>}j z<{e;vz!$AH%y1dNy>JbE#m{O@q$$!8XOjke$Cd3Dl;gxKMR#E-L$&k;gaF5{P~LSDwBu*f{TgWMUmwK?r%3T}ysLB6P!WE`3oY~kTIJLt|I!f@C! 
z#C|8PZFF7|y2dF-l6_`zjp0ZOi-||(j0e(cHhk+f_`1(NkbA7_{^%3eWLHNeO12;g zre&j1BGDJaCf`B`oT_BFO%-x-cG!uTEqJKI>ez92FCEj9{i6H&P_(b%Sx_-$QiF(i zbi-7JaLnq!0?mSqdmFRKt8+KA7ke(6?5(-_j=RRK($7=2n6UL%(`=b0WgIRuf7A|% zr?)*SW=R$8vN1z#<5I_Ln|{bnZotalEd41oe&tzN9J^pO=kJJyfSYH%Upm zZCh3Si#A_Mp=dqC*5cTAQ#8}&G3rJP3VM@S%!#&NmxAk?b%yA+p`UB#^zY0U**5eS z&smg|bS+7LQR&5V+dce#G3r<`lOqCwFT^@E zVh4>w*{zD(mX?mYLAz^gBk~7%C=UDuCsIoxWeSMhAQV8v^@|~L*LbdWd z|7Ll$zYG7Sdc4OJv!~%*YSEatPi40(pF(X9-hqL(HqTT9EvL==*7T;Kwo`AQk)-D} z;=nrBW;wQT$?PE+0sgd&GCu`=8bnJ}*uR+$=p9DaNs;87;G{VW?OI&SJDc<<3U09) z4tr$vMBM}*=s$E(*-=Agb@K1Ks|u!CYmdf- zlk1`v=gFw^2sI`*+K>m72L&EZ#o^%W5w%OBfm)1_8WM*)`$d>OthJ4lLi9+J)?C=i zItVE7zd;?I)24;Aj!sHrUYW9&RHRv#X~1OlWoW0_bR8zGwfSisbe&@Ktrc~Z zVh;npxxVGqudns0t~t`McusI1+djKIqCccaVHecaX@wf>G~w3FduL^e;xA}??=@6Y5XPP4}-?qGPB(WIkVHQ zw#gv=46N<$ceV8pn>v1ldhS!3x>h5u;)VBl8+f4)Do%+3;ctIk>8@Prf)L1H-gY<| z776^-Ic_tI;H;7ja^+Xm2*R2*-tQF>hao&8vOLUUr{)!U?((0~@tX0Fk zMSb&EM2uAyJRe>Vpb&lUh8~YkiiI;TR72HDN7Q{)C{$a$y^G$17-Qek`v$K6!h)7Y0(! 
zh6?4+QN4Fw**-aFX=_g|nB{)@qu_RE_4A>)@fAEXH%ncIxO zbIejJx`-=wHI1sxG9qHwS!6qC*0N`0-MTkS7vp)(8#^KUrJtXR;!kM9BFwPq@EsFB zdG$4qHMNYvH71~fR46;?V{HBUFLg$N#CdfgW_Td<;c~0VJ0vr+Qxz@qHsnb9xY=2k zY#$iT<46nKjMMHEWfHV_T^E`@xOaz%2ePw6#>@#Y3~sl1CU!Pmq&EV^81|!D`_R5Y)NE>#1BX0 z_0iJR$IyFRw8FRHIGia zD~H1OK~QaoFhoRnn&ATU#@iB$4Q!Is(jvjOD^F6&ISv4O7E|KPe3Ob;>G92a8R#%< z#DyPNSj0@P59s_Zg$HM>^~2`FtR#vXFPukkeUQj9&2ZXa*Nf+ET%x!nA6J=&f2|LG zZPgR|vLp;CPCyy4kHO2h=mS|H;S8m{m@7qViU6PWt~9MkrowipMJTl~w1SpU6`W;f zSgSbSo?=fmBw!6|7K#gaCm!(LGJ_n@gZ6N#tzMwb6*kBMX&XUfuk5fOU8PNJMx_WEE z?yte2bwjj%;Ln_Yt10lv5Udc_hCM)OIn3oXZxY-|jO=xzo4eF+kG19R18nmu;xvq) z?Qrl%&5Ti&VXo4zUz5%<#oKyT5z6WI5^=t>{lZ|lCC8`?2^TU%hG}fx{XMrBSRQnJ zx&F)o!TNt1GrUmfg#t{GJA)2X#U*htwJkEPn*v`hsFzMF+ju>IB!{;nNOG*b`iUNf z+bqc0-H=4VdN%VwRzXgdS1;@=aHZtvbJ~m+jphE`63y8wt(%z$c3g`;lq)ru9Qwvc zLpFAt>*~T_e(9USoD-ty;_M2W%x06Jc+LM1W@JosxiS(ldz{L-@}2`%HgipWf zU8WqDGM)|SW)6T+waOSw8rq<$>_0z>{S+Rb5IYS8>`?lQNaaA);xrniUUIvWu~dNED3*GDzl~>`+aWzTCR9@|{ETsO!sUmG#8<*1TjA(?Iy; zPsn!j%kwD8oNa*)xW1(pvz%crX)}^GGt>kZWQYm^%#OzDVhzKtS=c`Fli4&WLznm4 z!Yh{&4&R73UN=vKPrIU{9f9SCvTSLBRK3LH!mNgys} zq!8(OS-355KatX&c+v}~_r!ZzcK2VLrRO<52OSv|FI`2n@4<%E!iP_v9Di|(bLJSS zQFg>DN@o@2^lMsAMN?KSS$KK9QeuZu?eRskZ4f2?mn5U0C6Lko+C|?ujmvXCwabv0 zkyPA+mx6erp;$5Pg%ATKHx}L-E?jX8(=74brH>^Yxn8DST;6ds?Kn!B87XX}r& zl{OT8A0}SIU3(}&PBu*|p4$PAG)42m=Gv$p1-btw3R%Qvhtqfa)ViwX@V=g*w#y;{*%j+29?C%R?P zUQ^naC6%a*`OV!$nAsEBJBs@fgIb8)?9;_n)GM~Fwe~*=kBcq+0Y~Ydk{0M#*kI zeuA|ympUog!(|K!t6%q0fw5hBl!ii$E(Io1H(iS$yP|m4DHXysw&|JPDi^uarV;*7 zrHb3=ZtW`$#h%)^VjwIb_)U|oRf0^ndAFNO*Fj?*|8?Oz)2j3`HX@WJgEw>1G;r~Z z3iKixbJ?Qa4y^bn!g*X30F4A|;?A;wKyy3yo&v+vo}bkdHqQhPPPu19gTW7NzAhFXO1~;JShFL((6g`wjdS6Ke{mJXBig&*o9%L3 z)ma+UVCx{4{>jg(mX{7yIG&LL((5fZoC&6d(7#Ula;0xj zqLHnfhtN0W(ID7V8kP&Sd{u=c$LRj$#i+n-J#$5qX%}v3Q&zltF&pK9l|xzYxcz+& z?j3!s%YzXE5z#;ceyt%vqBFEz`;RQuOp3yxeIEVDpTHWi2FIu%*&k+%`iH!qY3nCl za{(C%^Xo*&iVCKty4d!<**fBGd!5UX0>5 zQ~k%W74t^sJ?kzqygJ>=1w-A)tjoce-#ASdZTzVi+1f(jAVy%@yT%K}Ef2}dkC<}m zHxw;1qc}CaTZTRb$;{}HqI72O&m5SrI- 
z+4NbP2asg=G1~ zaQf>E_U}!TfAbU`WQYOrQ$nd`Tj3{{p(S%hiM zQU64}yeJm-UW$3WklK-!S%j36;xughIn|1d!j?(2TIfq$Ty@oZn+4(2nOHW&v%{|w z6b4hy$}i`Xx+|`THoMvo7X3f9T=fD4XN-)KgUJ|0_I*?UD>B>NgjGKj$HV=XfHlM#dKl2q3ljc zV25b9bw2x2X5>b*u~6-X4DBbzAXSRj%TiVoRhOBOzVA9O={^&GjF%CYU5P#p6`~uK zW?xLQHGEy~Sn{xKlsyYs+0^}6r7*DvGnhUUmwF!AoGC7ntlXQ^cQTSKOeLx+DPW(a z9>TTTX3z08sxBunO}^m-kA6P1?&iS9SSErt7o4aKKQmtzf+B01C(w>jUDrS?o{>~y z^^8Oa#S>*8=fvr=)C+ck@6=_njV(mc7)%njN=wSg0n8m`*JQB=rGo>n_6b5*k`$Z$ zq!0Q82SU~Hd`G7E2-Ez$8(c#;M)AlMTYYaZlXhx&3;RFeFD=KK=%v`zmEQ|U8A8)Lc9=LBE!#`GF z@c0BttcN3E3T5I7HMG7E7`7~lt8zoDk5mB)vP6KU4_E|yotvfQ8j3~$scTu-@6o{Y2wdmmdC-)S6y>3(1xgX6hF&%Yrd%52h}%qVeJpC zfD{8Y^|(-T|Twl6~dDG=tIpScxDBUSuuD*pe#3p^Iku3jQ$V_Qxi z-mObsqfZX@c`PfS9ct|e%JBz6-+#|sbWF@|6GMSGEM4QFaCpx+FyQ#E$hP7%-AI{j z03Bdq)_JU^wr~Js1SblCR0_}?#H@dcs=Ecg&&T&cAC_ObFwdTw+N?-8x3+ukO0>Xz z&0;u$wmMe9$1i>*o!`ZLIHS6~iSn_fH1D4P4-_oF9F$4i zfe3jOJ?O;1s+QINC*j2a2r>|@ca;_W~etd%$ilK2NqPHM^j=Y;r6Ju%7%xryy$_TP*cZ_s(he5_i5 zgnviQpGJ&lkgJsH|360ZAAi>W4=|D^FvDN5~nbZocVhS0iGaCO6 z%7am2JD_6LdxF+m88l*?#l--1sw_9(fkyf%{fDV-0Btp!ESEx=zWgnb@O?b5bOdmi zU0Drn7KdL~P^32>w3pA48{yspJcV+1vG;s2koMc&Q{L+X{=5DI(7hgI0#>xH0Lv&7u9$0>6P;#Y^exes+bo$&{#mNuea{@;%~$fIDcPsZ;N4K*S#_Me!}x$Kfi)g zA!x~iNx220Ih+dRLMctX)U5Y0cTF+FjRAhArwTZR;~f`;EVkGjr!&^$DL_Qsqc;X9 z%YT2!`F2dr9??$=S}}sYH}f9ABA%Zg(v=mtFWTBU6AI7;?}52fvw?D8bK<8di|5?6 z&+)ZcGLW2K!WU`+EgJ7{hxH%nbI#~tRfhpYg!B=FkU*Wu3Q*imI#Y@AiB0XmQR>;k z?UntL>wB>Ubf=+ppbNU}?dA)78J61`sT-Dd^pu; z*XK}T+nDEa!s#dP!nm0CKk_SFK3mtvb=+oj+=8Tp zZ6xd6Xw2hIjNVU3+?}=7fvz@2vXv{_m1&p2%wNDnG;3r&X%65}mVilR`24`6$WVZ{ zjbU0aEN};H+AYT~*g*9;2>=uXEAwnQpJo!P<59F3Om0Esihubu8sM9r!g*RtqDDZ- zKnQ?jtURgiflOWick_@!%ixitYYoui>*W{b3RHH>8)a^Hq53#*o41O|>%BEfmT{$r z9dL^X;}|7>xhv9qYCy_w(lU@B+OvMqA>oTz4V(tb?N1NKQbVq1?8ea2)m?_#@30FWT~ZJ;Xp8xLXOF2gjJgrL+pDwLqkhA05Dv?4(<|i?lz${Q-o}x}c~2s5g9Mxc{~$n)-Ezu1 zJTw77jY4SzjblYKfM^kJL8*0etgQgoqYPKsnW_FKZh6!am6GoMltxlO*#h!st$ztO z!Zq5~JlnTD{}gVdTj7UC*9DSqWwh^R0I|4M*RP4Hd3Raj+!lvCo-VTwFpL)M-FpbxEprO8ry(8UCC^8$g3832 
zTtWQe92gTc7(=~Y+y2DJGX$B0NAT!U3~_rw^nMS=7q0+(CkQIBpN=3W76INb`awq$ zLOlR5AZ3ArJ?WM%{IsuC8*oyzXPaFe8t8s~0{(Hq6@)lb!`(!8UpM zb8Afl{Ie36w=c(&a(T||O5dU+eq3zg|l!+qdG}^!yCYi|>Eva3n zeH2E{zhJnV0PCySFFE~R$_a9Q!2Lql<(svhGoR@rzzaS;UU&Fv7z^S+-iaDq>ye8W zx&bhTqUx_!K*3S{1n_R8OjPk{pBTJ56Dg_TU%OKm_Pze%!!yh|_nv9;qL!@`piXXJ z)zC^bkO0ny#$qppJqd;{2?H)uJZm~T^vlDc{q}P{h41}!eGR8v@}Re}gqiEtT2g$y zXt8>~k%J~Sp4z}*g-8Ll)enq-m+>~1GoKEZY5|6Cfl8GvN4T#uisQK4a_av>i9N`k?KCo8Y|Y@Y?VgBj|TUuKNxP8Z^Jwbw1g1DC`J3JjP8oI3J$@j&4G3!gkt}%xVMao zs(skE=>};j=^jA3TY6}uLpr281O#cEp+UM!x>J#EkdST$5F`WvMausk^%u|kJoo39 zPqSFF)}A$M&$X{OkMn@g#O`W9 zupgmfiA9^JN~zLq<&Ql=_Utx)lIgXEIdn5si{bCES(?N-fqEXHD_^gNc(X$1#l8hk zv07+}_a$f0=P%I7!Hr}wW#0NsS(sEPf5 z)Enx^Oz}V5Q7g};mjdP?It@NuKm_!3!9v?B;+ODB>W8T~z3ZgfbTmy{$rePYKTs${ zH`s;Y&E+c51`IFMv!j6S_)_mS-JZ*3tmqXZKXs{A#7}BOPG5u4n=|0-F%f~!gjY@B z`<0kv7mUa*;YrJcu%?Un`cR9q=@lc;uW}dvqZN4+MvI%pY@ytAEe=c?!O2m^sKJ}) z!O|jnFIl`wcYS%nV3i#P&!}1%Wp`A4e+a)Ep%4g zgXi>$puKC?xQmI9A>d(P0tR30K;*&QVb{qa0H8EA&AHOIKmX*j+n9HEq;fP zqW5?^{>#sio^OiHwI7qtk>-Zk>$?nds*f8(N&^c74HPX&@I1GK zn#{9Ni6{Yr7OinDansfRLQtN(i%wGlEFEx!7shihdTsHLY3DwA>*p=pn#4uxl*}1{ z8{WmDe7pGTcG+ED@_ywDBbCVqKqmGPPa_4>$GFOc3*}La<{V)k`px=@{WcswVU699 zgBHJ;$ywgpHB?J}WkLe=VnXy+Pe0n;_mHc3()#If(}dx)Dmm3qx#K}?mu4NVV^Ug5 z3kCi=06SU!4|bB+w__{E%(r?g?|g~fJwYwpL2?fW>kN^JM=mblL~)yAOHo1u)foap zQN)E0X@V0c?k&(h<PwlAdfiG@he7&{P{%ferGMp&HD24zrtl z-~0Om0VK@qMattUx{4`V4^C)e5Ie6AbW43dN=2=*O@27^mz(VfsttGTL@OCaWO^Dm zf{U_%**pEkVUD796vi;1h>Z=P2T9A9-0)5?OLuxCC%2Om!-}k|TXO|qDUkZMezlbv zpXSPD>$K{L?v$i+5`$CR#xS>Nv537iS1q>WaK+bt?0(d0p!O4hfG(DIRT&W_+)Yn1 z%tx4E(#sgLV0L+Cv-pZI*X1bHUxjO95)EzWhk+;T&y`prBhe3DzWZ=fX2F*z|G0ou zNxlebf)yX{TJ_k4sI}DE!IlZuso0n_m3l^y-^-1patdJaQ{%_$^Dr5P5R-$+6Y=NX zL>+Zo7;18_rYte!SNE%Emlz)k&YtKfMPI=WKbAU9iUv!*|B~tv%IY)?$^31AtKO#A zHNe5J{@$})BYd2n?{V?mu|7m(g^EY{8tJJFl4I7)>L!UQQc*)D_t|K|>XEIGV=`v4 z0H>6N*SI&=Y`GTroBPKQ!d{z7wUH=UY^dn0LVtWy2B*rg1;cQWZ<=wU^H#6S^DBy4 zFqZ$1Gy_%pd-$@7n{R?N%aB;$JsD)WqS06x;Pjy`ZU#m4kzrLlNTTw|)+)nRP3m;A 
zPMMJ0yrhoS>s{?WE^>BDsJ79VP$&P-;X>=dJu4JTfI{hd&~$(?mcXJX`_5KIe^2X0abXdt zN(?Ht)SLe@zK7H5^`Obgcw!ThBVGNVhDvq@!S}ZsLC;hA)^c9TQ4}@8DHQHIiERXg z{Oi3!>DwTLQy$588PH|`+f*?@by`bI>!qyy@B%V6`D zJ;4N>Vm7GK$=S^{N9-(%rVGvbs^r_EyiLbgq=?8k0&z&*W8v({i@@Ya)}H-*LBZ<( zE*>O}8yAh!EJ{m9?c zD^2|G<3WKEFO)L=2aiPtS`u36mGA07^z11u@R0D8;=hIC^7%f|<7Cb0Wn&7I?Znhn z&xodEv|gd>C>iGn473|Xco!|b1_iW3oC|_ z_lyIt{!9#T9;6J4kyGM3SPK_)v#ZkmF?^&vYrW`lV!^==rlflE*YQD()MiU$b9Hq^ zi)}gvs;Mx9X(N1X>4U6zfTuTnRxi1ZXr+S=s zm*Y@fV~f+sifXSbYGuox>_y_(e;7>;;3Z4YCNKc^EMI z2N>ZAhmjErgQ?8k?&%9X7dBf(SJwH)*Y;Mm6zafTo62uAr%T7%1}&v%GqCZP!YxMu z98C@@1iFCe1OAh#i+{ws2HV^Ze>%JSD3Gemn(ayNmi+#k-Ia6N#RTP7i~SmGB7|0A zbPz+wHhZ-UX_KxZ{w_L{rvkQ|NP4&8mT9kxuJWz1We)uYB6~?4?&wLNl!sqj=@;y7Eia|vGQUojCn=nd}p6d5ap@mmCfk}+k0v|O&$hx5d{ z&}~|!)Ka5`k=(BzXdMTRE4=bjzyJEh@p5Tf4f(3UXIQ?W;6vz$HMiY( zx%0H$@z)T&Ya8I^PiiNbgU0H=xaS`!Ea&pU`YQxkYno_S#J0JAngW){VVJOz%lurn zgfh`~2rl7QhSMnEgN8y-D3$W@kx}jd;lz3BozP7ae;_^&V>@63sW)+Z6|JsyS->KR za@J!p!H#0$kc;o0fq5>TtcQ~(LKNeVCbA?q%w09eg!A5PnvVc8E4q#uk!A2g`%Op6 z2&vpFk8Z-crS$TgDlSIw}Wm*%Fl}5U=%-} zGuELdp-s2rjK-f8h7Scf$%#gChebyMSpEm?xxYpb&c8+wUqe1Jnv(w*LA<4<#l>Xz z$8V@G0-YJS>Svq;XQ#N;6sYY09)*%>lDa*iCOD$XTvLvhf<9*P$2rh>pP*MEyj?5m z{ZPp20anhISQkv}9aD7p;P#FiDF0}wHnl`+z3o6VG~H(*QUaXIY*6c%8T>lECNWSF zhWFGI!{~{YDig`ZCLW9 zlLL$#QB)x>qxqc-3ZUP#@qJo=L<(}v$VKCWu@8p>z}kVbG(4+A=fO2rksfF#qbne)uCy&fyLsNaAX&OE85+> zkEr^Uw1nT(R&2n+sjBq|OTEk+?aeFCml3v~by%=OUtEqO>1O1foo(d@gTy5g<1*r(D-B={Y1S z(JL!=Mf7cwhb=N#V;PjAF@_ncg2B2}wSF#m+3_5dnG?^(HKd1NkU_u`vf`Z(kG#KQ z6Z}P~gcCop$oX>bBP@5}u!RW{HFPK|C|J|il^^(HK<*)jv-=@sL1=XMXzAP4`v@J) z^niIeY)sM~;1K?Cv-tN6q|6#0stq!2pyYKEOEL z5-)8~u+uZ`khsb(u0BGS&PD^OtF(?EV#ZntN9Lsm3@6VDzq2vDZq4h7g|;kd8qGg4 zEPaXTfz^6rse*Q%7&(`O@i0bA#RZIk$?42$7IeH2?7Uy@%xj@I&m)8w+h-cEWoLDHuQ7vpJa;jM*^Hk6$P5R69qiCsUbbEZ{^JHf^9BazuSX#` zY)kPMI1KDqLJ5m4&qKQxwi6}ZLLUl#?7I=#_U1t9SDew!5;jX@Yp?x$pLMq)I3W|> z+zvxZ@w{fQ*03Vu6>|3cd?&E7XJDgT13)Q?A9lc9D9c;N*-LK#tH^jxAAhGcrn5PK zrAWZBlqV3el)|^FV>xb{JZrCdLzOaaYwKhuUh|^Lww%UpI2@iP;gVr85|nymwLg_R 
z#HLB>A2Qxn*EV8a=dUA3TK#3@?^wqL?6!IQH<&_beW*{>zSuI;pN#zecO2Ml+rYKu zfvj=}RF$al($1t|7M*MRV>uWM^CRn=>n$3=k#uRqOQFB&sn#Y{zz9Wt5U zX9+StLg>_oFxMZ>e9PlfyGiG$hf?AE=1}BI#6=Antp9K*y})ifC@8!Si|Ij}`m1(% z^X#O0oz`4BziVT?@MK}i*R`2=;ZzWNpVDNM^ordErgTb{9#I2Iu~0SMPD2CVXqxqo zU1-DS!Jy{Fne6u?!dZ;@Z&O4O!JI?NX`0%&eYi~3Bo1zZUE4`ol)dEboYvy+c)P^% zgzaJ}YcB{i%R=FcKYdT1gvhhNF-K9Am1<%<0l(~!5)+yYj!|m}6E)4|m@A?{qk+h= zT4t*FO*VIeu*5`!)u07-V>TVQrW=o-JI&M*|5==;?Pz>%uT)O?N4^~OiW(UQ3r(8h z+G9=UV#UYJFYQY4d1-jzWXg^3NDGBGySV8{`^U$URhOo()+8r$vvA7-NHRvUTKwgU zhSQ=f2Pd~5#tr>c>Hab&so?i7Eak~>ECmVcAUvSXMc66E+F{=lnFX8Xf%VT58pDw} z6|L&%mL!|IIR&_C#-CBAX6C?3uxQR!Kgn4FALwv!pyXwaZJ23=<+_%I z>ZLY|o0dIBpo4!-)hoGN^X{4W{J(>UY*urBe?!r07u^ z!XQ~Q6mdQ!A$cl#_=IQdZr+i~zthfCFQ7xm&x$=EW88-4pov6xbHzsqfsE3S;6RDC zAk;A{7YN6q>G;!eH-AqexFaq>454SBcrDDcpth6!p;30kZ~YYilaWteO>D_`MUA^h zbTQP978c7JWM(MijI|>9lriOjyS>ob_+HO;y6(BeYK(=&w4uG6UrjgsG7)`M243tB8PV z=ZAbh6Mcrb!7e3{KKIYoN#f1?n*4l(gX&M;!Sm?&1E`_tFFTpNmpX@b%H7MmUN{=hedLj*-s8YP>G4rfQ6g0IvktsB zT}ANBdk1>{FF^@@rJKH=IM-$G3fEU{s}=_K-8?iE>H`=SQ+)j7(Po@noK8cKRZKO8 zPNhs6fa4OT6E?mjj>1;uIq5U4+*iV(QA)83z~eIg&)`Z7%%(zSqGK{#e8ec5{~)^RlN4wc34cno%uEI>7zs8 zQ^+Slo|Ee1)7%*LAn7a@cDVPbY!g!-CFCTK=a-2cxqZrHLn5N~v{in_JT)Feam3rN zxBTt{M?{D-c&Hn8tQd@E%7`}-baUJhWQs0|;HtOP6cX|KVGR(*v8DR5lg3YQz$qJs zA%;ONYNvHUm$lDxeD>jP<-;)p11Fbc(x=gAcP@2^K$Yjv=BWCS58>Q$GG8(0myq6* z_Bw(EfLX~REfyWC*1e@&&NidN$;+vU2#u`Wt4g(Ip#@s}OTF8F&DD4ih8=yf%Hkun zGWod9CXW05H?oq>%-48zmmQ@N@qOTNu)FVRPsm;lLG@Il1A5uP1B{Ux`eaL#4j&aC zrlTDmlBZ#FIaVHU%SiJTjwZX7@q_+N2;~7WQI_@hMSsqL;@2kq7@w{DS>}om2yd#hq5b+qJpz^#QdS z&qj{_Y}UFQ%Te*Grh(RuLQDYH*>hm&^kI4|5eNA?$r6Y3W;yvxY-f-m=&7ri&k_V9 z8;3$+E9~M!-hJF`owxV|?)QQ+yX;Oag8(Y>XN_#xWmNBuRq3pso-1v+8>}cYVkCpT z4Ikq`Sa#GkBy=LCpQhc83Twb{Bc_;5B zv6R2u1^{#>S(zX?yUop2rHa{e{DbJmdOPT$Tdb;N&pH@+aPqvU2xpYcHW)(?e{trjV%awO`QH_h27LTfiTQs=!ek)2I zYB2~ham!^QGrqr!nQ$2z#Ge8?H}SHxE(u}HcAM%v#-0%T&cfH}+Nm8N(IBe!BcFGP zG<1!ANf4n^nQkw?!12%&lkb^jB4Hirzql4|tQ(qW(+OdO%w7rwOM4Ce=z{YhYBBv| 
zGC7b;pxzoq2mw#HqCyJ!>((LI<*d^)*J;~^&c}ftmt_hS&+a@}cjY6u&*+~$h(XEO z1<}8`w=}sz;99;vLW(f!S!a}d*jplt$4AXGG=Sn0_9&PxYjQqL-QY){945Ns!^b8d z)~K(l>3pS2oGFmrZ82u$g9b59vqA16?!UT{pQk>mqEMXJX1BJ&#m7qr=4PbH<^#;~ zhszFca`%gPe6|W1?4?UMENfKb9u=Iu6Jx9ZW>T=AY;FBPNW3~wl)$Y%vSldDMQbQ)C}D(tXTwl#Mi!)xC%i0Z63JnzV!|4ck4ZHR zh;&m}Vzpfti&Z?ix1NeGZJAwC#eCrWqRc`pnm_F2(2#Ewk>UzoGO*F;j@kso8AOHu z?x5H-$r^5`p@Y`ueG+=knm-deN}LtV?u|DCIujoBl5 zrDn(Ih@BiLrXIDXeH`eOpCyRnO=ckaR8^(_x6Xw4YIXP+t~0TeCiSXfMcD4Ln5*|1 z__xkvOdSGR3t_3p%1KN``-Q-<-Zx@wP}EdXEFM@2s9DSZwq-gzGX-7Unw%ZJlv&=QF=hDjP-H)2ZU% z7B&~TfcyZNL?3z7sim&1MX~Yr^74^zSl@a;L1S0>_Tz+{c>Fp9qBigUWi&Yvr#DW% z`eQUvn(@Fj_5*@5-+%$E&D9`g~(CbpQ?j>i=5Ud?g+ zU<5%~9B2xT;G9k{NZ82inJIhpkvbw0Mj)omAZl&X1|&*I(C~b5hI-MDH*cSBrG#55 z(-|NN1^fSIUXu0p9yS!UhWSI~-JEo~m&?Yc6TNPm7?q2pvFUS1qG8OojEuH$+0K{W z87*k(#SGV-!_=SI4r^fW;d^9Jo-*t zN9vdduVu%;69mU4agV$flVsFyahhB~Kncy_2Z-g_^DM3%i=((9DiN0Lpi>#ze_=5D zu^ei4L*k9PtS{w&J~CS$NI4XAETz%nB!x%~ft*-N8f%D(bH?KD2{T3E%}^fq7R}M8 z#B#Y0uhN@7Mt8JXuKeMcJpH6sI)TFW#)l1uqZUA6`rb8E0VqsLFhwmKh0$Lf`Ibz@ z;Ouf97oPfYM|oXdfh|ly;e8#b!G&-cqp(IItZ&rHrjC@7W9LP#B)bZqq23n|&em&- zg-<-Oc0l*<-pr$~enS8X!=$=o`NsB6>!+9Zg3d=gR!?fWB2GGo2IwlQcyuvl;aC_G zdM8i(_UjOza#ksZsS<*^!-vLOo5Ckcvc{@M*Tt%9n2k!dm<+OiIheTBqweSUl)a~8 zXxIt46Yf`2B4JrY>%xj~14;`=|Nmh?Vf+H^lhNEpB}$W1$J#1OI1rH;$6^8mC{>PZ z+-7f5z!BqvS$_p6*6xilevnMnk=vok!-dj|=Q*-Rp)9^CFsGD|xz1!zFUCh{;f`0x zb)T=Bn>87%C{3*KR8F+w;RMWOJmtBUjpCOra0wPFn%MId?sQ+V!snpTQ-}N=;LhM^ z4DFdd_~ydz2oy=$WD7q;pM)*GG-gxJ5{v+L)Yv+?e|ADGTC7-iE=Ph1Y!d?=jUr5q z$5WF0g)IYu*9lE!14iCKXiqd!K|mQFfth>lSpRJI)s>5o`=H29lXKP}$DF|W&?SWi z8gBgTFY}kJr4~92;uDyBvtz!Hf&6t~>v>10QTzSurM$PT&E%c8N_lHk7(&4-lwPao zPdyUFl_~4Q_q5`r88e!R@3Rlk>G>A&|I>N&PIlN}%0Wsfztsl=D9hp=(g^2wNc;B9 zZ`_J{HmR+)a@h)}?0WTdXn(EKs)EstAU`dE_sN4TJ~a1p&jaBnH923Hga?_%N9(WD z5>Fe@dUzi$O5;lfM z$6L*dQJVW)#NfVbU@+oai)srrq5U786nIVYZ&S+GtT=R+N1Cz9WulpTF$1|yH) zX0?#CVIP0O&Y-!-W;F^5b0Ob^Y%vVWt=AWNsw}B)1A;F!kAR$J76dXTEZ%ZVGwwKi zWZZ9aZw)Y3I}-^%R{1_ 
zix`Tx(HcQtbTYUb5F1mnT|1?Pn*XR$k|Q=ON0X`T*fOwU51Q#KQ=&JZlkkJD4%;&G zx6e$I6{v+MHaMBpeYig#C{?x`j%k}xUb>g~vxG$s9lWgjfcat!s%Y=0k)*ka!32q~ zRUR$jzhm;Onh2th5&Lnj`*2vG{B`XRvTS!ST_?AS{g@EeQ*W+0o{JqC6~`g&5ymZ{ zHN2&0$@0QPYu4m{p)g;3B3DkT!~4EH^5tq8G1$&{)i#~fG2@UhmJGFL?_e8E?|-YX zSOqImat_I zWSCKbwz&nX`&{S3gJp7<7z;sb;@fB{Iq(oxX(FCuUbMT)AN!i=i?8IPy8|omc8;O6 z$Naus!6*lUNcm`63F?~XRj+M9_!OdN<4C*uJ6idNRt9NmPXP|bm+?Ow3|=*@Q(ltdtJM>8N`S`|MQ!veW(1 zDsitoQ!S1`Mb3G;b=eScLwKK7G|g`!rl5H)@_!}ug{l4!F$#QK!a6&IQw}Qk331S7 zyMi(YJgG>j;$*Qn4^@p5s$i>%LD@}=-$;xi{w-(bQ&=;qed;a-y=F=v3VKOq69JNZ zA7zhjOgt^{LeWjrXQp_TuOaFSiKUZCGVGPavp1)R5AzazUF(z%T6ZN)1|dhBcaLeijfUUZBvy)B94pJ)~W zMROsVS&8kOs=a@Dk2D=Vz@ikh7W@;9qyGQ94h3*+5aarIH~-To93|G~|Hp5w3?tMt z2S6N*y?z`ueq5*5w_*DytBdUBIK8GBogL|~iueC?Ej*IY^P{pBc){(>h?zfjsO%;R z?J1D&S9=^wN%f|n>P(c2LHF21F`w1{QC`4{3j$UO$!n{poWOtV{T}7PZoXeYoctrG zp0QV_{UaVU2*-o0hS{I|BOde&jt2z})foLF9z@9i;6YHjN}PYhg9726le0&i&;9@D eGER{0U+8@k1^AopImQUUpR&BVT#c;xtN#Z~z3q$u diff --git a/docs/img/chart.png b/docs/img/chart.png deleted file mode 100644 index bfca26bf16a479ce798d93025ce8225da10c48ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 169382 zcmeFZWmHw|x(190f*^t-Ac&+Q4blw?(j^Ux5Rqol-H3>i(p}QhwP;X4x*HZD-QBhL z9`=cS&OUpu_x$*NeB&GE9piPtV$Qjqy6d{Gdwx}vm%zay!9qbn!I6>_Q$j(x`wRu; z4l5=q_+-0E>!3gUvMYgQoXh$&7h^i;mPKxC1|K`2vJ@<+PkFEC){s- zK)Lx0U&;`(=+(F$?E{JsJDj<8p=&qHuoi{wCjgSrB~u z%`s_Iz80rr*T1vtUB*yG;3HA2&w`=;j*CzVhWC*#iS2PFzKu#&@Dt0PQ(j5(cTnT} z3bPjtLkqh_%jMK^-!N3EfOPcud5owFd1<=J!8PFmMPJEE74DDr#{pP9i;6UewI!L* zM`G>t=PA)Gl`WL>j5gss3Z&21y%iEE*1r4%#*anYliJ&fZt`}{R-}UFElfs~5P>^?8qS(D>%c0*R(!t8h6e0sV)4r zil2CX@>Rr;n0uD!6KG9b*!;Adi|-Hd;m;?l&8$H_B`=wcyyM=h#ok{2=%SC%cueT~ z+aOniFWRqz^*I*?Bq&OMXDQ<->;N#ZE_u>Y&7Yt#?{=Tkp0Kh+`}y#ea^^jStkS(u$9l zg@$SQO_EhCn5hpZ=N#pl=R$HXa@u|b4jbndDbA+NqH5==SSA z>h|a!>aOipCm|tOCYd3@=gc6%<`^|6?dQqf6Il?H@i*+l(w8%$)9li$)d5~T^zP8v`#kvVXkF*$5eiVaWubQAj3tf z3?ie`rNtm7lgg=(r?l~9RIWf5S_mt1%y&#aX4?{+T&du9c+5A-r_V=~^qddwAipLs 
z|4vfhr+fOBV9!>6=Emfz_?pp1^co@u{sW$#oxYM4lh3VOq_h*O%OCnj^WeL4 zbsa&SZ{3qRTjo^+x5Sym>sx7BAGF#o;Iw83 z)yg^3_|Qzyn9GJF*vHw*UCCu;nae7~3N3By`JPr^9$gxFhQ@_yX2cCzBxvSorc?*J zgr89zTrHn4#uDlh#u1t`#xuGy$}zP;dQ0+4E_JG-RcSM;vuk3h_yY&_bq%HPQ6 zhaN(|TLeJOEVd_0p|>p#pm`QW69Zdyo3@)=a2;6N*7Vl%dSL*gJ8>v718GANGUkxRj0Ld4)%;&9;XPhB&hCW<8b8&dko( zk-Db+LZEWjmHj&#m*F9%=%jY;3Mf0tNv!3r#6KI*5yb>dzn3OOs2#mqOJ-^>k`KH)h%QY*Y-5CKp-fjn{-!%U2hA zcyyni<3vAEvplVOTe|0JXuX7x6Hkb?bgk7aKkJPC$#elDv0{Rs&$+FgRKng^?6rHI=9)d%Tbh*O*x^{Q6EZuky``yB-)7PmKvE~JIaJ}aKhq`r z8KKeGXEO`Cs=G48_~X6*b0}rdtHy@){M`^`k?4gd?A&EYe|v6FE8kHKs*sdXST9u3 zu+?5gEr-*9P2kdzCm;5lwx3oZBBrxH_YI`%9IqUmbfIJUtTT$F683eYIagzQ zCk6E0kGU0_=))h=iTb4K1yvA^S^xrrtSaX1)75*oYEf^66QZVP=x5tFD|%FPP`_4deP#%)!h`C4@ymK_OuK!H7>uO#I((2fqnYnb_M~^RcivIXN*qu`^rQ z8ne9Q<>h5zWn*DuV**z&*|}KS>pL@9+EM@8L;mAAVup4Gwx-thrdE~|H_z37Z{=Vw zNJVw?MgRHVzm3z-+4S$dY( z9rz9*Hg*nHfxq4GKmPRhulz5!YX1G!mz+Et|Mk}Y@~3~lRe$Z73W)<(-_W=Mw(jpWFEezO>@fdRWYTaXhO&45-Gw_ z!NXI0P@;`SppvgqIF5(SrC5?i)${SYH*J6)c8+AkjGMx1)H7Wk&$epz+JqjXvILxK z4|m(XWL~@FZY!>Yi|#I7WeRro1M)gN5tLhKNdBR?K((2e^%F#L$=@FECl*EjNy=?x zj{?`deneq`{UBueFP=tWvB2()f{ID;um2JGg8B(c*;Mib=~XD;BKJFg@okuIy^EQ| zcpSU9QH2rJc717w?sj~Mm`ENhrbIy*j4ju^<$%Ds=%S!t!% zAU^Tk1^O)r1Ig0v=ZpAscXtc$^W1Tq$IPE9_1Z4olHuB%cA$W+_>@1rz|A%!{Tj;f z4UZ@$HD&LdWqZ7@s_Nq&Iu=dK^VYUCV@iUD^VkGIGW3u2cwRP`qlkHnQ>ypx<_V9p zS^QEdBqBGl_}bv26085G<$heb=>|)llY^=5Z?5!zy+L66(D1-+dpBokKjQfo|I)Mf zhto=dR2L6PVRLLofh*pI$hpiUN4TEa1DM~&$9~*@z2IkhV27^t76hC>mqlB6m<(E7 zC=E9Az>1$iN%}sH)HdUX|6xq<|@nZB8f(|-;QC%oa@nx2j{PebGYucwi4lCuMA3&ti*aCvdaH#7ohNd+(ah8ZXY|Y~A~9;8Kj@h7kCopIS5d)Q z2G83~x`)8yD#Oapo_&9aFfV{`u*{7#i7p2OaB?$zX=muaD?<;|gRG z9t*9(3EZ{hI%>H}ne?hZ7Q-HVwt0-9#Li5@X{;yh`uO4=f)`rUdw!6pOg~Bs8D1GU zx`YuK7e4m1p-vuJ9`ct);yo}KzYP^L0hjgpiKRyIyH|gjKN)U}ml!Ar9t{`U6CBA4 zo&ND{Blnu(3^E(}kt-x5G^0wsVb#{HA%sX!Mf85xJW}%D8~&kJUA52d1$|xUwOp2o z-=L$Mr7=H_C@_yaNUHRk;^n78cB1t)#7kx4*urdr03D&ZUQCX%q%78KZJmkcSK<4O+Ct_>&HsJF!DXp9kv1a|h}n 
zgz`b5i#a@mNQ{2bz$fuDv~#@1e(mAC{u+l3_s`JmBuA}A&#Thz^^d%0(Ea$Ig||%~ zJb5SuY02TmL-H&QA67vJnIj&55M^kEz$^5Fi-7QE7FXXfhS zgq+WwmYSNHlJXVqCkD;Z46-w|e2tRClOju}t;x*voOiA=($e(0byepqTID9a7kwjS zgM)({`+{!AtHwq~Ss~9;78e&w4ZGGKH8sQ%5B%pS<*&^Z5{47 z6!MgRh9B`mqh!84j5>h}YQ~LD>wbN8LM$)k z!5yqi*OTonJEGrKPx&1WEziM#Q(TsblRi2{v|cIj_?&DG!b;`&MEU zFC&KJWVyg{x#`dk)x2fCmK!fw?{@MooNFb7P$GLLzfXC|`Z?5;*yV)u;1Q4StA8D? zZl$2UB!!vAPe!Th=B5ZKFvfO{g*0APs0?#hj29~iUk&T|GH&<-$7MBB$C!XV^lqZm z5V2fNXPUrf7?AvUWnDrl!Dy$N-q(vEAx77g?({g!g19wf>-a7<6|ZfGACiM}CQG5# z%vC_f`|hVVN~i+%J|CTeR+V*HU+YfYL9e==$C91$Zj|Wfdjki9AFX!EGQ(kHg081i z8UD?3DMIyPUmq401QIfJN(~yhx?LTO9mR61_r3LcGLy08Pmtzc@izM%3KExt=isB^ zVU&AKX>ca< ziW=7QZK5|u3*g{%fWWvaqGi9AVAY^m`r&Jt57R~}R(;(S{EjA}TA}tDi)o&EQSx#M zaE{e{1BrD5=@PZ{TVU6_*28Q$&4&9Ud3vx_MtR(hEow(pt9BYLfOWnt{UD|8Q9cu1 z{zfKw0Z9n+R`7|v2Uk*7ZVM&fIdG>}%RgOB59P9)(s-Fr=0o@?QP>Od=a+ANSOSgD zT9`#ZYU0+&_)2*-Fe|~se$gm;Ra>s@*~UgC2EkkMUA%l7axTEl!Yi%De%2+F=4)1% zMMZZWPRJ6+KIF6-%xc{jD;kq>IZ@)sdBtTq*z$F#%(zd~|Gr!jpTls@+Zi1tC8fw# zht2VUTDpDCXV5V2l`qJ)K!buNCQQ51GHE%&MIqnw>O6kWv6C*Fr}^2CEGKlbJdRcW zBkGH>kbv1p`D%JxGJ(|}1CWA=oZ`-?@;IZ;_3i4FWM-Wj=Mz5L#rB=X>+OQRQgJCr zrRB6cm-$%XG&W1!A9hpai}G@g9o_hvlkM6aF6iWNpmK)fgEy|Y0=wQ$EF-i|hnnPb z(%~>nUppQo_KGP^LDLKqMw~XgSxOIZa5GR+_G{P2bDBG@_W!Cas%%Y=%$3`l|GhAO z8DL5zDBcT?G9zTs-7FIlS(>ac=UnMFzRiSswiuyshetVhD1naNLb<;0zT6Tzk#;J_ z0Q}u@cbx0%U!#(ixCASuyHzqKo(h}$xWYmGQ18)%3L`P{v-;zGm zv=x2b-G^ta?CR>S3CYRZ1;ffy98Ozt#>smz>vToy)3udOZ5AWIFTD!na~)!HusvIf z!F|fd(KU2f9hA`o1A(S~zmp-Dgma%e*)Fv45*n;_HTe`n-Rh*|M>a986r{WmyX_4U zZ+N%m!?@bjDZ{6YldZ~G`6NEh*r!i8O#YMsuh&&RHP1PwJydPCLc(c=rqMO9vhBkl z6T@6!JYaexnIZ+5ZSWvCLf2>;bS#wejQ^5l(mxveu|}0}JXiUJAeQ`#)txY_!uszB z$I%_gRj^M+Ps%-HIT0T5jcu2{NjKHKIN78G=+#zRlNDXjOtl7W_fxiJ z`4EFNJ!s|w-``xFALXePXnhyEn-c6>q(Ybz(|Amz>#WZzmEbt`BMQW;L?MrwzQXc= zEKSKW!>(vxW2RG8C1i%#iG3BW8GRn}>?NI*pR|XZuU$qxlmsCK#`5 z)4=Lh?|mx!<%^+^$n!F`+CHGCuy&izOFuy{JtefU8^R=)TkT3xS6azt*g4Mimc;@d zbA0i+enuZHrQCFuTZ3?FOmp@`9X$axtWV8xXOC2$>m_VCs`1pz~436fnk}y 
zmX6AUphz&fv+fMz-Ff^KJBvm?!K3zPx zlZ~-lXYH${*1Mtn63qtpGa-+26Dplv>gJ4(d%0^8n{Qv!cAt9$niq9uVx>$JH+kEw zgOt<>iZxdt{#O+f=!m7~T{JH&$D`&+RUNw=aXQYU0-Y>0(nNjt9T;egDO*QE{S#rf2m zgTC#C3(L$Pp-NYVBNZ;eEX)4XS2J?IhG{LHa9PCOCy_Nsfp$J(SS%B2@4RhFjJ~k& z+0m5sDkKEo(hPpxJvk>g$q_^v!rG3XM*OeyY0O3S8>0dFO2){es7gZ@)k9KFIdtWB zS?B~HL>6VzFsS9nF6jbaXV~)PL77pH>+ytf)NVD*=*)4kJxuuOP&v9M5vTlg@u$yi ze0&et8ZDT`1jubyEl8{KCmgi!_rWiZxh)g>OKaH{yW`j^4E~6sjcf54>qAOj-|agx zowzz&Cy;rD&Lb_)wPgK?mzU!LNdy-W!uwKh_>7&My)B4%fyGo=)&aP+O4Fe%N07G5 z#K=pZr+lN~-JCWp(+RXP0W;i!+I znV*4;&V)agx!e+RSRV%drRV$fVyKg5#Ux3}OOSuV;n502U6a~= zWEUxfhN(+&937^@ws>$z=5@JeIbDM;Kk2?5+uZru56g5k<|9J^`%NG}-;nDPK8JtU zAXuO)VV6pvUznfg2<)|VAmm75h6GCSoj63xSv$}95jC7GCv?Kg*R$;Mn7RbYUsR6d z#%h-=Udu9|+b(jZ4l-l4r`v>2j$D&XAj_TD(f!|@Mk*XH*nQXRU;N`?_yD9OwBpPVA1b-VYa zOwIZmy}aZU?Y0V0`cxM@@|2ZTmVfMk6oD|}P$<0@On~p(ogJP2{W*Vfq3g>7out~C z^#@MPAfxB8PR-M`s=s{z+taj@tBohWt`cHuPSXCd*Hlpp@FlL*I@jvRN;*^JYrcR! z9|Gww8zT+TeR3*DtQZCL6;{g(Tez0?fSV;TG4YJFRdBMPig99#Bg1xT zB{q(vSM8h_R~_BWQb^hUF%0Pmz&6@Wa*`M^ap?$Tu91{VbK1aH5E#gkONfx?>#$U! z&pMvTq-Z>y#}*ta1aJ;4v+rYM-Ot-D8?iAr!%>_pD8F+?Ng!^(>!0KyxEQc@erS6{S zPL8|e=frT51=FuLBQH2NP+vxK(@L$6mn^h=#joGSCS>H<$S*sr<5>DYK(C_sE{lK~ z)R%~6T;bKbWEaHAU~+MZR2{(I)3R9r?AO+|v{(JsTyGLT5tCNNubr8#i#;QdL8fg9lsCoUFx=Zz zIq_pr{{(Y$J{gqcNZvh`vO-eeS-;0>(HPGP1hpYG)8Z-%p;BR%x?@r1#~QmnL3KAB zx2=kRbQP2FVtoMYxHDts7HF9dtk?!3Lh=%2JE9v4UX7-B98bt4aH-ORy*XkcD4zKJ zERcvr=tKLri=D<r{+bIj(-4_Usj<${v>6# zJY4+YvE>@xzszNCp~=hoj6=kgB09D~?9#5$l9H0jGM2M^!e^h8JQaO#TgUTkX(h=? 
zxA)>}VBizri~2k{456pHbG^Tx0biMK-D|vHp-TUAh%IWceN-1g5?Xcm$WR9X6HnEZ z7<)`*O>y4`Now`ti^!%Qw1#yh(Jtg+et)6jZbW(HdN8@LZrSQ<{%;d`psdB(Y9dfm zEz+HyjaZ3h)STBoQIa6#wfz9FD_b^aO}SXsUw6Lou61rE#NwS;S7vGbez~IK*Y6vW zmu`NKaPC?P!a5M^WtbXIg1g&=*kPZhv zqm=U((L;QEusfP^O@X1IC)=6+nJx^zxob1^wc<-R7V(XD#N8r(c6-ZuC^Q;>}@)*e}sEbp`G4+V+O zef{|u$FziXxsKdW_6*y z0bQY?bJ&`67|qx8#lXGK#>Jr5a4C2)#SUPbOjjX;*T&bBF?|*q`s?5BdmuFP6q)m{x@uCn$*lb~4j?90ZC7E`Zd5-~yJYP_NPR8gT{C zJEe>H-+?i}QULD57p?oMRSO3HLSSF&g*aOQ7$V0hs0>YZr5DPzva&kk!Zsc~BP!Wj z6@i>&H2|h!LI!3zu@J&*#}gFrJc5ra-x@?s0_n>m(gsq;_HCqRN}WRZ6LhBRJIykh zn&pHrAO@a4Io{^`20IX$47u`G9tuVCB9ua30F_BJom!54$!m~w4m9Xi^ASY&&L%}8 zd$@=RV1v`DONfxv12>HG^%n#?yb2T9qkvtun4tjnt2ZSjAN z&)?jSmk!4E(sk}J7t+Cr90Kz0yBE&mTgWd9!oq|7UiC8=M?Q%GA_|K@1m`<65-|1I1;X)G!lcQL)BBe2oYX4Ajbwl6jAOP;QEN{LmEEE7i-Sn$5od9%U7+w}PCRrX=5pP~rm9fZOfKaoB_k`>{JP&A=pW!sA}RhSrkBmhYzD1ouyR4J8992Wd! z<%VYR3A%fGdjL!fzT_1oV-gFf*11kskKkTuF# z0lbp3u=J#?RaSBeau zsz(491U8I8^~WP3CyP?e7TY}19t(CCh^wpXvl66)^o@Jfy@jvkCot3v*JtJA#e&SM#H8uw`=kfvjJB0n+382qsz;~a2(#g5ZJEEA^LHw|1AU0fu;l4qY{)l}@+KH6B{_tjOb`J?3H9Q7h1%67Nw<+Q z7if4qztCY;v&=pqpKE>UWtyz5 z1=sF%bN$tHpM&Wp?01+<*k~iY)CXT1tRM{k@;rGoef)Spdgp(!bUv3a zc7I`zTTE5i9IcOZ!aoYv1_cEHodX~ycKjDumY~0Oo@jNM_(ApEj39ArhE#U-(Bvn}7e3v^2MQsW!-ksGi%jXL(x@4ZUd!+QBH)ai$C44OWVy$dH~)Yt`LmfF+`$IN+U^~{+Wr&$lJ?E%%Ii~@kC z#!oE&C{oRfuEgiu7|IUqtLv%Yw?gLKeB5dNu$Z!ESKBU0r?^18GkP{-Sq@;ueo(FV zZ)K|w4?tNArkQwP;5sD7y+-Gb|3aRW-*GdDm|Z-Stf0G_tN-@MjOzvyPHTun-pBjI zY%N@huW(J>_B&ow>~yv1rMT~RP=^XAff^YuuDp8Z-s5*PgFqOerffZ`j5G(?_xZWM z)WvN%wc07BZ3Ue%>f`Rp=s8SXx_h!cZGNI;Sk+nhKztKA4tySZh#-O~#?unOLDMx3 zgoe9+e#L0p-e6L^@7>+q0sWQ?3f9gj1~`}9O7F-f2S3?wTesuQ5(vNj>h5xa6_@#_ z>_t2m0B*Eis4%E2?e0GtKL8MNcRoD_7x zW!1vN!d`k_I`??&bJ>m9uMg)s@6A_-6<6JVR8qNjGVA5ljxm%i#`0WgJq@solqV5O zA+?TM@m$b&sN;yLdK80tZv!X#ZzDjCUhj9Y^gwim%X;~`1m4TNg!|$O<*Zp6MX*)B zO&149NtJ6qM3vrO`2|2MjWz%#oAs`bW3+?58B%5xJTYA7a-b~F`#bXNE3y)G@rh4_ zf{@EfZxWUXYDyRhkgte#0C%$RQFfAq%i?iW-?z$m>ujK(k>2lSSS&29Fc>$28E)k8 
zTGSbK5vB;z`Y11FA3Zx#`mP3~C_|foQew}06<_Hz?GV$K(PJ^9TRCMLxTTj81k@_M z4Vg%-0qCxi_?_(Gq^Ln?AK;a5OU{o z*W(01HCNY~<-v(#*vu%}#EUJYUP$8+Ca)DgK(r&v-_S^v+?D@?{ci^1Kb-xl2=JA; z{uvn=-!AUjwgE7URz`Wd!eXKnrIt~*&bc#^?uPLJ8io-IaB_#T6)Ly-_;&w)ph_MGhdIoPO|N!D}3q_h}0&8eSjOMx4V(E>}F?W z6o7#GtFljD=3uJi;d5zPIC~MPs0= z@-!|fD%8Pgr*mC6?9@D`23QWkj(R@z( zB_ohM1I+<&7Uw{-W%dvr`_iDW{&=#bMJzjg8Qv4eF0nwTQ)Ax|{(Osu9harchUVF` zaUG8A0>!k~uI9(3M=+YM8588b;IU#ohx0M7Z3dfw@@%$33J9m;&2N#& zqyhYn=F=y1VF8j5L@$Sy-6YVI6i}*yL_aP>SE!l7%b-`v<6gG~zw>VD`vS}B>%rUb zN3Ls$I>K+VI8<>R>}+#_mvbsihg#Lv)679OVZD`C0EC1gA*D_`x>1!D6YnFq>Vddi zx#x)0ZK6+r$?lJ=jGSCqlxrgVCfcm)1Ed(}=ixx42}DEbDMMLuWm~HK0ut~U&7#tR zzIr>P43`K+(cRN-xa=Td!C?!*yx1o6^6F%keE*ld&Jml!tApxtgLXlFr=ZZ#l$Qpd zc>~rt%L+hDEPPQOcT5sQlQTg;9T5@1IVN{k8>EPjvbSa%XE`o_`io;sE@~NSX12aN z*L!*yWLuRc-g%%2tO5?dICf+D zdu375O^=4x3e>`Zq%5^SeJ&be3EWd!?pxVY;D1ff6^w5~)MU0SXZf%@XAafQw7p|Y z7p4N)kZzwaTI3&E+o5VPLDRtR#vp@_pkEs7yAcfo(mUD|2=xGn;svx8M;|I~gi9b8 z^d|CFf;0#)nfmaY)cQ?ldH$_(z@3j50I;GO2pT@$Bi`@fp3T7!{80c9cgH-YRa!t! zjX>a`(-}=qtm`V?kqv zPY+D^l80^AjDh>VQ8?;1f9AzFp4*ElYt_6_Q<)9t%#rpbInB8I`FX2=(lmjte5kA8 zh_HNr5vWT!D|(L?p!iiZC$g)^H2Zk%*r@R@4I?6HLta>ah;V~6^OiBGKZTT=?DdP{ zqTjUvlG+<*TexQ;)e%N5q3f~~^~&+P5fGB(Qq=%O|T(_ z1J0{}AwH!y&<>g08*M*D#N>fa?+F%UyjA%Z@)6^Gi5BTKDC>8{gtLYJ4kW^bqWv!l z?c*(YHW?Kt_N;p?hVIhlE&e5jMc6<0egC}?L}i?p7MV$Vi}0Yo_cVg=yj;cp3s6r3 z?`F(#adCWlp2$%J3OpAd6LdQO4IP$qXI>vJ}h4u-~!V%}5Jl zfaJEJ>O;iY*x8kR=l*I}!id^!%n09OfQ2bv&Kf>Nnq~lmWfA>#cc*O~WITD|rtgFl z_w#+^qsGI00~*h6PZj`!uxZnizIeU%{NmyU%sa;IA^UIn^#63DEigdVS?CFiBZVd? 
z@ZJEjj&|P81i3+t6M~#9Ixddx?*Hbnym7oaERnqb%frGS{mswOV$1pP)Ep>K*3&@r zNjqx(D@b`!@elAg>uLY7IJV>Z-)jBA;(TjMN2Y(gfg#SsDJZp#-w!rfABhY9#X>H6 z+%keKU75FFB1`Q72$mxPibQ~+FoY&kH6!WGyn&Q=P=N|Lpp%ZRjEQ~|iGKLTb|{1~ z@nB2-dF{M^M^OIRPhxKX8To%t`%icCf9v*-C;R`?CxpqQPxn9G1@M0-`MiRPP)<=r;gjX;+ySo!#8ngldG`)J^6d(~3og{Jp@o4|;o5XZQ0tw96qLwSy zMl}~h$$Y3=#m9}__hDKmw)>BR29*Yni$j~Dx9OjSPFzx6H%26ITa6~If)*Vj1~tah zTR_)o3~+Ehi6lC^=e|yXvRy#+L&|3_Ip+#W|M&BDtA+!{wxF{FXk?iIBm>gYdYqh& zHdJ!Y)43r9;R1YjAY^WGhzIYSI0Hx~K0ZEZNNIUNul?ijpXK0qF1O@z+47{t5&hL3 zE#sJQ3#$E#<5`J5@~ht|dZfIr8O~|*=S4kIP`;F@YLANJuX|q!&D+wR;u9*+J|;Km z=?Is4UL>@%j(W`#O`l0FM}9JEbDmpjRtWWk&V6cvajXBD7_aC!JDxNeBc#)4(M&JB zb3EV2;#sxWeB?Hku*Y3AT<4a)y)w+#7UZ>Fn!9ma*u`ehPf;X%bpENTSV!phZP4lo zU>Kk66;FV5gJrM{7Qk)L0p$tkvnlYwdCZy$0Qp~`ij@U7tqc|905#9ePA0hwDes2f z^0*I{iVTt>TJ*K4>82Be9w3+_pmF_n)}rgzee>XtmjDdVH3mRcZ8Q_)^Ag9=$~(~a za6YQ!esR;J0bqYkgb!wy%^KQ&I`;p<==)6J^^0LouXP3|r^AiM2ji#saqRNWcf*=J zXii*CDxGvN&OMt?6s~vewSB61J(kP!Mtd!E8tl#*jW6LNgGVs;fqm&}SzU+S-Cq#l zMovy@Rj7)cl&=gH(ZLfK?~~f|a;uS!OdMthKd9C9xnrM{AT(h*w^kM5057s)cC}N? 
zd)242tGz7D=I_C2(Q*777Toxe_(SIVs7p@Fv#U{s`09bwKMw0U2{=R2=*Pwc&0 z0XiQaC|Vo?hNCYNW~%hauqD z-8AC}l~17GE4Vsb8p814%h8xGp(|rzsC2)YA3IW~=_(XLoEjW>Jvu+yAZlgEM1RIo zv-&+(IolKTaGd+2zeuuIil$C`HAB=twg9lrqmvCE46=SJrx|iq>VHCQHc_QsP#({C zYVdVaC$)%-XF;N+(_B5KHm+u8PqH034ptk=Jsk7wq7w@22|5Yrj73Qd)zKHQvJeYE z)EX1FZPYoFQrJ3kC#!X&!=r4+&AQhYK>~+Y1)%ueg%5}3d3g!0EVveIoAt_9%L@th zj0;u(nmd8lY;CP=Iq1&WWc`H(W`Vj;Ret=uQSb8?!UQvnTIG_A>rCXai=U{%FAz>$ zUSr!j<{`1+Vc&&c9z!N)S?zshTvnb!M}k`K;EgJ}fNSS+G zE)W$1s$+33EV z<(5YJLYIzSVWPIn5JHZ+k@&3jJGauH@~}w6NR~FQp~8By!4l{oCYzi%OX{VO8&(uv zKOT_!t-xlShF~DfTis;;RJyEh;>S1rh*_!S%7QngJ~JyTzT*SnVmtTMN?nBjz!{G| zG`=!dny<1#z;MSebys}Iw>xgxPj3OhF8XHAwEC4caEx5CCd^XJGoE8Ol&%Z6Gnlug z>vW&{Pw;7bw;rpN3f;aHU(L1V;`2&~O)*B*ZuvtFJI+2{=U7jh?p8BHQ8nFQnYZI) z=EmkGgkMf>jCR*}P^hF7R3|==$zD~~g}0Plh$Ocqr8C8I%7Xa3iRSZT<}eki-TbL~ zQ@10B9}JmoW_Q>nct5?|kQvFJhrlE?y?kxRYD|88*OWIO@)kZ{rN$9Yxg}W46#I7V zwmTpoS1VORKkzR_Dx>av*Xs*pY0$v^q&fvc0;cea zq#OeMg)oDEoEG~hFVAS7Mg29q5=7~rm?am+J93!r#Ujd`x`Yw|-sw@JFj@EhK%o$^ zSbH>^q`mwh#rJL2$Pei}MysgvDef+5!*|lX87IT8DxW5{{je{$p~*MvZGPZcZKPRM7h%;bw)UH{ykcv{A2;@aYIGM!zqCffx7 z)1b#MzXKX>=BYz1pbq^21vL^$r`%okMfSBMUKm#fWhT98OK7%i)p2%iL+eItQw+pI{cYR&5;!UafJpOom_5#hb>IOJBTz^G#Kfy4|8!%ze zpK2QVJxn2%buLEFbD;fsmQ1P~kVXuFkPI!V+SfYZEpf|D)%%7EqB($I%dTWp-E^#f zq4NqWulRgqyl%KUq13x)gD}7ALEuyfcUdJCg{zzItbgb%rSmnur(59`RUn+VteMX8 zcsfU$8j{|0+m5Wiv5;0br)v4u?Bw}qL(jzzCAt9ua_ux0rK2LRtkj(m8iM#7-GPB5 zy%}EPEz*iv8xw!B;9QqhT(5)THN~*O6+vO)vevle0=|JpD^>>aUTw}HmG$i8e3#<{ z;{>n75@D#*e5L60H&+czzr2axuam^9C|n+7|;5YxX@RcEQ&{fN+x7hSE~0ZEoEqT}W@ z2{*cfI%gZsQ;<){v3u}O1m@xTBDB}dTkeOW{*O2SkSJNq7V+`r4EK{UC;v)v-82() z<0WF%KI;aDXdMPVKJKNu{N7N~tBnqQgzLp>sKLxQ0v zk>1u@omgBT)mX782D*v@nJkhX+Ch7cl`Qa*2#^^5)HLXn#y(VfZM{)YIXNYhP@a?i z5yK64fWvd?v@6mafb?|-CiK47xwV-!0~Wq$Wd;B~@c?h0Su9UZ&>Zh69}WSpPQXD} z>sy$9B!44TC`3@dnu*q1N_#ZibB-%~smSDjt>I*}75r|A8LN<^;!T3R+t@O*Yo26> zTV~|en%X{KZC7jN&5HI`vvvW&uj1GMu(uodo-RZc&1C)N#V_TmOJENC&0I&URx<(~ z_v}SA&*Dt$ET{PlU-#ZuRijwRAJ8#XAJ2;8yFvELg3pJGlPasX)Vz#cLFW@@n|pXt 
zFz4&~D`0WZOuAJq3ph-U+R4*f2U_Z$m$2Wft-006gV?W5Z%xfxp3G=@Q@MQPdcKwo zue|2>0!@U4$<8Zg`Jn*?10(8#7PCK6FHSwq4_|g97K|C|cANu(?2uTQ(QJcS(fPwR zwUd`eC3ufmZz9Zn(wIRQa9DHSbU zyMt3PMJn3%d``bH9?G~M_gT__+t}PIX0tyKvQ78vSIb54ih^QVcd8YM5PI<`=dhXN zsRy!{^Il5Bul){y(q+r_YSQabE={Px_VR7k0>7S`# zPnB+DkS!FI-HP?HafJ{$6W8>xC=oHnK4hDkR5pnCcsH?vBB}0j!;2f6;lHp~!^&EpS;A*p6@>U?=Ao^Pu|webABSe zxOv07_sn2PVsSkaDnqvS#=Z8TAzxZE&GCa_dQ~MG%Cdu?<~w2dxGYSV!LxlLB|ozG znyRAVT?c_B(2vBNk>obRUv{)iew(z-G;;HNy?1n$l@A+dN4aE~YGQG|?5{4A*V^RN z>dw1Lw(kzP$vM*`vhJsq$~f;2iMPb}m~kncc@?*#Qq&{&G3lV=`E_8 zOK|ErJy}WLdL$Y;uf0ba)iIv1%{WlL)<&@lc+r+vDY55MJT+$L&h9#7%i>fAjhTTE znZ1NXO^=Palj}VCM56Msme+fmL_O@15vLA0 za|E>!u48hi6H0O{MKoIw;7vQ6-Xyb?t8hgRHopUJ@!~-QFp;7@kh~-`4oN0cU&6)@ zHd7Bjmk1#?mz*Z{=SZ{(|7dgP@r+W)mn8GR9}RjES&Zq3V5asPXh}i-DC6-pxgpk! zUX7@~NOjL^q$@!B$SoIIQM8v7Cj%%eron@g?%SqL`DGj0`(Bw-F1S}u`g|GCvh^qF z@^{DPCL43iMSFXUvC`*JFKk)tXGJO)<58i{6@En2Qgfx6!wfZYGNAi)hlTK($*(|U zdBoo{ZSMB9Hufm(yFY!NSYiNIdf4@&=lVGmGFz!mgFEbcC1Y$?1dI5HP)uFp!iw}8 zX4XgH*ArMp8(ZGMWcPGmS{-K~V*P=z2Q0FSZE4v&X$)%P`pw--^n61BIN={DxgxKn z5!1Vx)uUc^MiSW>H-t|q@u?LYfWl*O1<25V=1!3bAUSYFw5@o z3Bg3tHGMk02z3BYHaYWUJjz3mi16;4au?G#5w6sT*rX}a&U{hzG)rtTr^3~S23_jG6SUqE>! 
zNfbPaRnl_+k{qVQ&}iQk-pJ@(p5xNwG^54@dSJg+2IJ3DqV@IsckYXjDKS1I_rzxp z>OGgobg{%r)la`fD%#JCHM-}gon%={_A06H;7^tMz4aBYPu4OQGsNO6Ri`3yz7Daf zPnr7jZQd0`D9ZYev?L^m&9!3`DGI3g9Qa)T8$z0K-txl=fwxVZ=C4dXc&;Q!$})+ z?FIfzAGeQDMt-$vk3KIl0fRQR)4LD8-bOC}ZjsK_v@y>VKiF;?uj(^;kl#)2D_MI- zwORh!a=MCVChi0qVoSH8p3w-0UDlAy?tcz6STT%C-QSoHHcRRny#Bz%t(+`%czxkw ze3NM}yAZKQ!KTP(DT)QTd|>5c%t_mpzw2o5`|j`>FYq6kJu|toL9aEtJp>8}EtcA@Ys%n%S9VKGwbR zd00<49{hIiVd69Xto(A90jH6K{Ne3#x6D;Rf@ixk!-t<5t%Y;24mmG+VK)^y1edlf z9G%LU3Kh*9lnv1Sw7-1%vGs^!{G5Kl8!u3TOcF({+vYU-I!5f;mWf1Y=6st&whE4y zXV9kVo;(0ABe!WymEyEY|Svd;@Z2)o**KXIv1?k=D97pK!>AjzrE zQ8IZ<4|3S~z2`~Nag$BdC4yM~&`o8}YwK7qIdj}^h>Y7NmMyio#y4mKA~kDVsEN0_ z(`q7VznE+{ksnfsc-}gdUh_1?btJ$#G01FS;-@clZrf?nm1L+7URLasW z>MV42r7fe!jAuzeEuT6Txf)|*xReQUrwaz7g*K~Ir*soM?Jr(+S3o{q>DYcE)=D6$ zVVWt5i!0cJv%*oUn__8yqHf&?KJnDrHc@_0Y^~7l*FxaEy^DvekFG5w^GLBY4%iRA zRHEw^J)bh!S9m5M?vg3QR4V_ss(1TVX5hw60$&_$U1|-`g?JU$$J5#-spvjPD{7u^ z3=qZks%f4ZXnd5dVB7b}(fQPqy)rVA0e7FxYn`MS2Wk?@Mfbj#`a6Xr>r~;j}%Y#DJ>iQ8QW$F zxE|*4pq|IcTyuME*-YZSNrn#Ha)fyd1N~y|^JNhQtYXpr@aeI>RAFnQCq<`I95zyA zXXN9oS7bhe%qCm(DgZEJBn<|-aY zO&U>)3cCncQC6;ZKE_rV(x=-UQy+;c%a2Rh^43CmXh_VYlBP7Gq);0UouQpOC86Xg z`Om+5uP;2ID4cZfLq7Py{4$`Ozuf85CTdRC;oky*_aAl5ZwL=+(rwWzVm0)2MSTbT zAxTnx-4cgAdD>Lo*mER{UtO<^Cm;WAb0tKc+?lHqUl~h1yE*htRR3`!yoydI2EmBP zT41%CNS`Qk2+E=Tu{=#aKw_K^OLhra#LBUJ-uFA>#^|PRTb0sY8LrY&R>NBk8Jghs zs+daR9iT|te^*0P*EjigMepvV;!jM*q5ib&DHFIPY?YW+b!mLks@>=&oV1r~R&l*@ z&anDo9e& zh`t_#IgM6I6U}j~xJSV-Nqyc-(SJoXd*^F;xdvCP=j}xedRugK?6`aFX@>!!Z4poO zE@s>JDmFI)bb^N6GEQx;xG8B%wfm-A!#Jo#Kw&oh_RyIswoDl(;U^{zlb@T#8b#UN zM?=RJxpFKP-xex(PvJA~>|h6z>z!zm`xi`4<2GJK$?SZtD66=x(e~ZhwcGn{!63&q zSYu&YgIRcF-GUjcd1E9#)!bn1*63)-<|W!P&~3RoRSKo^FBR4sV=XvrS)q1!!xV>dT~u(3qbmc2OC8MuIZ;2X+yK{kdFqCwGqx%%UC2oNkZ%NxA%}Y0{^hn#^|J zu(X_`>_LkkNz{ML5}2@_lFd1+tUrhkw>?OP?oE6#KRH%lNw>KrROQwuy;-deKJ%mq z^V%AUPwUP>cDd}g_JFit>eClLwa{s`!&SnTk$FmXzH4DF$7yEJluI4k8ZA(B zYWn;c^F;SO)Rh4*zTR_Nla8Zmp$XuVRLX%I2-vo&}8W)W#YU6fc?tQYA 
zz2$}jZkkynqWVRr%5x5W)xp(Vva4&zt+eg4G@iw-9iO_BoLeMD$$B*hs&80ddNd`V z&Z~dTsWk~0U0l+Q>a2{i6IL;v1rq(-`N|_R#ng*8)lv-5NgQZu1iNR;$vnGibv<5)8JTD9;=^;jpfQ3~ex3o^_?!TL zG`pyy0jRqXF78H<@Aq`y3*B^Wn0JmJ?eeaarEAm?GJ*Hv`|^a{>pTPenzBsYP?c>yYm|F3@MayUHz%V)@b->zZ0b27pB5D?#h9!2 z3evRS#vjZI_j;kv=SZ)UF|rXkZlKfjtF#DL;}7Fv#OZHw@uhrUndxD+BaJ(^V3>PbFO5_Ft%yLd;ZIHWp(ZiGeW*WscYe`+^DW$X z(mO|!2*#8?7fKW(F8HlRR&epn{*vwM-R%(rZ41`T?Dg=E=reT?k7-ozKO&)B+x@$u zEe^nOQ2g2jU)foN9|fyKpy3^y)HJGyj;7yy%Q}ylt=@D8a^?k+T2zZ0Ze|AxuO1d1 ze8^o~|06J}IG;gMYSUfeB|4nzP7QGYBLSS5MErbs*!Gc(Kh7YzSEB0nOKP6Wu#)Ja`VEeeoiLN=9d>8G+la%Gl z47?;B&mB~C+P*6GUFDNZUw*Ti0m9tjSpCI2JOz~z)BlDf37^-cSKxg6mZ|mSjaM(v zaLJogygGaDbzN8;#MXd*_gm>xc?CuVbNUnX^ylxW>0P=#FY;|IP-@Q|wYmuYamWcl zviC2qkbqr>T3|bYjaozT!3I}+0OYOS{J2xQ(lr(PF5HAt?e|ybwq9^opZX4rjBNlg zxMn}KvBZ%|T)TqiPl}Q|xWBA&pbT{sdfr+kEb zp+y*Y4vgR7Hzu*MN@$yp_`vG)XxIDP5lNhE!tcF@GWOWXf^^64NG(ckKwU&D?Mg`` z;@JjAtUI-IKSVX0Ydmt*&57Bp^F7S0#wFcJo9aQzSuv4vW4Z0DC~AQ=KfL$QzQ!rF zE||?=F+lOi_Ik*DW&{P;Hv(GNR=tx@Ff;HxS37l?nvHvXJN3oSvs6ziFb_tKd=|Eg ztgk6Z>b7Ogy|!ZI%#6_*mo(DP!!MQgGa1T~+bMF+LIrQ6Fkg2qcPJZupVlfPmarpG zsX{ubOkl`TMqW|gRq2r^^!ScB>c`CVM_cEdkxR!9B_8-67GAE;FQ*`Nol`{qq=0pg zl6QXtQ#!|BXwrhSE*98$@*z>Sc2REgAoKX!TucDvb6n&-wIGWKh-i8B$m3FP1r)av z&cEFDd#bRdobV-Lt$C%SQDZLYq~sE87HMeF=v76>f|&m|NZo93nHJd|>FVQ5wykf^ zl?~`)nIC30Q}5>;>M{=qx$em8yfjf;!Qu~~RjYd{jX2`~09FrwJ^-L%zo!BUiWdMq zjc%pnQ6~c%&(x4($cTb!V)-pNOGz+;={V_?Sb0;u^xpE;wh}Oesv19@lcj>?{sU_z z!l3AWLFN~gvPQ$tKfJFCJ$nm1BXGA5c($@!ho})@&nK$CnCcX3r^-sIAg-=S-g&br z6;4ES%ONP9v#^63#VE6*1Lfdqy`M<*-a>Yq1+C_l7{=c`2~A`{D7uJ(0Yl(`!B7FW zXR6&&qE8lzZn+MMukFm%Zx284fRqnIi4i3Y8)3vR_>AF!R#!EDVZZ0e5m(}Zn`7D{ z$g$6O4zGcwX+-{1BsfV&CQTK6(N4&1oH1ILj(D+-_8?Xs%98+ntj(zpeADOsNt2Kc zKv;{w*WtXPb}^{p*FV{VJ_PCvAZGL=M(@pGG`~8Vd_e3JYs=QHePP3*g3PFIB*2D~bk*oo4y5&>?o>n-rBCTF@R3jg3S+m_ ziH!wMu>XDl2y1yq$AT4mI_kJC0t#qPz{|HEOvi%v9vxU)JI(I@9bXW3&OZTHt=4BC z=mKBdg%uBIR*jmgc+;^!i>J^{Un}SmFvDhZMwFcJyl%(o-$B9XdtJe7T8oVR21Y0f 
z=8FYIf3uT2_1lh$#-ZeWFt?w0c?1rc5Cx8MqRINpM8A(Pgt+_*yE}@Jz|_zuXE`}C-jd=Qzh;Kh zNo9oS*sqEE?CwV{!VJ5E7<+hnuvb5%H)bwMr{P{2H^E!k^({GjOZNCQkhu%U{2XFifYoQzR$RfXyjEkGzBaF z2ih2{{`wZ5j5ufVoZR8A_NY;E$hpG{Qp>EAa;W*{h>K5=yR|asypVh*NpX&m>xEE3kg3~4Bfc@4*=xHUrSo2o-;fDaXXp_1J zOvIJC=(-%*mMyc!Ql>0l$a!qj(MUFb@n30>;_`u;;|i56pWaYh8^GCsfk^;5t+oEr znFrsF%L@X=>`=T6_VBp zG1kShk1;A%QooahVdpR~Dthf;zj!GhS;e|OaXEKb(x&StPx?h`4}@oDbT(pdv^sfe z7y6OJzO*HoyXRUlJTi5H8uJ`EwKZ>gc=_dl$fBh@Fh=Ujq7H2zAir&8Db|};Y^f9h zXbvSV?%-|cdPcaw(D9U`jae!)3iQPJdD>9tX31Emi);<#L+1r!IFMyfuBI9`nDj4; zkVZ?^yhky|w?>Z=3;YV@d?>vrU?KNbwzd3Ib@c7rZIv;!SNlkZ2!-ueZ=Wk;hw%;f z>MyyJU_AQSqOY6@sYo$ei-)>IhY|BD9>l|(R^A4*7#VDe7yJ?^JiXy_kziz>F5QN} zl+-*B#3VLF8a${Fe?39g>&kGIW4OJLtd`Skq*l;Jw*VRX$MWBu~%6oFw4&CSvyLCtraf7S#{HC>8bGsMg;@TGap&yjH7k0F36!2sjzY#qpAR+u5fQ8h@$Te|`)z${mFbY96)sSOQ^t%f?al*? z(Z<$?;<%>09UspXP|gy?AP@A5bE=OtXc}AQ4fu3&R#Uhu0gYP84mQN>+RgZMXhD}D zw(dUlIWx94G^d{OBM`31oM~~}?IY^iOpK0Q$cncKCas<4Ns-1uNF`O zpPuSg%V1Lj8Ek}C*lj}W`h0DF;DZVzy7MqpH~nA22Se+0sZM$2=+fkRk>#Potb-lh z`WAc)zuw3pG(pZXfc#LLnoVck`FwO0!!PSRJ$9n9)wTXY*EzVz>b?EFA0t!DOboYp zY4qP0?HlIj3cSJbk4FXuXSn!4@#f46-!X;)g@q;j6!X$AoonxLJ2nx5^?RH3aV`^w z-^xo1Rq&-0@?a!UY8hsqsoSb-hm+#(t8J!mRHLPO-8}!S}TgpZ3)O-R?Tl{$tNbh|=V1wY%qUK*F6h>e4TemtG^5e9(e(3rkhAO%p<)-}`d1o+w+rv_OY9T{Tkp z0`7P}?~?=g-!)BNJb9t`^9-1L*`7v2-Dp^Ka6#8Z93G#6+kKE) zY9muI{GMbTC6iM}XB;`VR=7D7hk_T)*f@7)Y{S``1(pVy?UO~Rz*+m#y&*SZeok87ZsHh#ifE>XfQNxJq*CxEM zH!FW7H?@xVbfGhukL_rb^TiVH^bqr zd39ZPbrB{q8*-fpuM7A5=`CK6o+Kqg<22ROSa6ZJ7S!9N>_MmWE^Ny)lp3BbOt%Gq zlC>Yz%~!vy`T_p!6(Y@%Q`|o*=}VMIH0k6&G}KIFq1{WK2iPeX>K~>t^$%l;DE0z_ zhL5lkoucNC(AbIL4KBh@X3D2M#}H!`i@QUIG@tcu*id4y)YDc4n=L5zV#B&vPFS|y zPO3v^G`0U%1@|Y1tA{0pR2Z52Zqan}S#OfJ1dyV^;NHv>a<{Aa{>VzlcVy6WMEKI0+)4n; z_-(I|Q8qa2WwRF+H;4)H(rp&P>%@y~No5?`%g2`~b)dr!$`>v50uzPRk>8?UJHjS2 zBPF}qJn5+68VspM{3J1wxVk#;QuWwck)xbw z0#PDJq1jbQt7ww4sI>p!Jt!VbVXoB6rcQl4?T3e&vyNI?Mv-`x3_WXy?jUT6)~G1x zapr_huS5eJkm|1kvflrt=zAX$TNetJdy}3s;;bq{5-Nxg_pRPK)UpCaJt4bSLuEdAZe 
z4V2ky6&Io^Vy`iT2UvOK2CWnYDKg~MM@8F13b6uK@U{kBC2LbAJq81{r}Pckv1%xb z0>AGVrP8nBSuhW5)Qs_*d6F9AKa#NmO`DLx=eQP$b<_MFY!w%0hs(%@oin^}nQ zbyuR(+m=#>CBsa^4Y{*>2*t(?eQvl|L%&BUYBXM6TWk7;bjoZdp>tuqyhg4jD*~T> zRwwpLk$(Mf{S-lm5{!gdi$oOdMA`O~A}S9O3B~1Nvq4sSrT5m( zKHEN}|0e15rLDyKz`f&h%kRZC1+9i+hlPsua*1*_t=QhnW2}F8w83o3-QcCoZI5Vj z8jD#B4E&_S)-A{FitVlf?n@RZ*j)CirveJ+^A_U`16{19NTF_Evpe*f}$Dk6-X zo)IEsU>LT6{(Yg(zSc%I^_@S!W5#2h#4lIkO-Mr+Xo(dJP{C@OslJ zE+2JadM+Zd+BRiM0%n&mDV8^oW)l%>6CO<`)d9+`Ny<8^Isl+u!p2EDsApAubW)a+ zTN@in%qu)K8>yaObHx$vpH?jLUu0{k zcpz`=B`b2qp1~`kX+Pmy8;EPFh z2h_c_iQ3KbZh<5C(5Y#&7!8dIEAMvD-$j z<}k!aF3MFXD>SuoT*cATzkXU=y~zzqpTf|UYEhrVFXgkAv@(*vo&Jgz2hJD-#6_4 z`yKDAcwcEL8oex^fBCAiOV!B8tBEpHE}A%@8y|hWC0vpaZz{Q+Wvrc0_w8qchB)b+$Y^Iy(B*$&35FI^cU6{ex?1 zo5V8f5L@6*bVI`Jp)yrMSz2OPVNsaVn@Gm7j8_fdW#(X=^O_KOlwsE+nhml^ls^^V z<8A%U^giqk=z1vr(uGFq7TsU&kdCu~G^r)uo*RKtb(IOuR(JJr)kB<+QrAK%L8}JV zJ0kj;>!Q09qoBb=tFAty_Ifv7*@vB&_JC%a3BEp7r;V$6v_`d@B)5oVdlq={>ejr8 z%`~fXTO4lHuL_6t*ei8u%om*wJ__Ii_p@F<7}-s1uZ@;iR@g4i7Y-b`Iu0tn9YJ(J zJlEMPTpA#9<#HrJDSrq!5#XSW9R`77TWLqLf8fuc0wbXba@%3D!51uP2vtsY+2-j| zct#~Zpf1Nj;_!MP)##1EtTwYT#&o`ZNpb z+Gyw`Eeqy(rViK@ToVEgJ0CG2(htx~%j0&bRKN;@?JHiCsRVMuqg)7&5$_-ikQVTq zwK6!%>4@|XfvsU<%qchD9C%%`zXDT+V(NO?=Cws~HF{3YVd%EkulW+ij`=a`sH)DI1Mp*G zMG+v1!I=!AGWG#2s4TG^jr|LuDDcJE+cslpYjWgRpXD*=LZnesPE;5J#9IxeZ~K`D<+2I<5nEXFKmN|ElZ0RKrpcePi6I@*8`RMKg`?(vS00`PQsdMB*{#F;3|UGycj zbIePnPrkkanU#3Z3{Nj)f7TB{H|RJ_fCLho0|11Av{ZnHnnNij467x>LEpQ~u@{_}jtpzbmP)Sioz_c&JJ=Sj+CvWFZPj+Azq~W@(1#+{pUL9RtfxQrA}74C&27 zolsYLqSi5d#OCO#20rX0-Fb~_rwtHjnos&tX`5MI3>Zufaqlwfztlz2RvKK|0m|_O zRad-vm1P$XwL`5Fbg$r(HL{zvd#a0^^>#d>$xH)ORMCX`rmiRf_?){al%>F|3eQ&kbf1W0Md# zSnioOi@*5w{fgH8uiCVtrXbBrL|a^SHnX73W7e~PXIL321p46$PMFIt;9`HrE;ngd z+xV;b93U)Lch(&HD>u^Z@}sU&6j`NlTl@QdiK`n`cwYL-SYEZd7@tX*Z%{^6`V<|j zG_2I?epB0e7jEZz`FuTQC(cl`WJDU-v_H~5@_slr<77D4JlOj@t``hAe5!~K=46Sh z;m^fMX!g5yYmFI!ydIlJE+R^TN1}c_kU`x4`Icj%Z6LKvcX%bqqi?+;PhVTcu#gYp zq-&IQra(Ve%S9y!Ch$dnXa`_pA8X~@C;&wb0i1&fy=yZhNnOn;)m2ZUCyS 
z73>&uK{i%>&#?fa0|huqk!gFIN7Y*y0gm|^*+zKvD_EUaSwP7hpxXw{vg^QmRE%=G z8`H~URs=A#tdE9GiGz)}<1qrxOCYlo4D?ajX}ZN`5-5l-$jaCYdYRA@%AN88nA!3w zAcxT{yB|KQ<15W_xz^l8y>wzy(1PP+Kwyq#Rf>5o;DC+Mll7bVu-MNrd{Y24T84~} zMbBg}-HF8|)X-Pt{xo~>ol+3MMLT97mb+4rbncC>_YLO%P`)3X*zZ{bsbBTHl{@M7 zC^zHfQjzZtRKN~!1Ut%>u=O5YcwJMDp+d&Lt)&xZ zfYE*$HhPm;)4^`E9z{ESeERWenG2c^OWFYE6V3FU`VB>)L;b{H(S2U8FrP8P+axcpdRg=blhe z>6JsAFnmTx+v1{=;~jeglvYgWDbe&A^q6WRJ=0({tIQIu!YN^J8Edr@L_(o z-U?Pe(>;`W?l$QrEbONqZPssu>8JzBuC|t6ZO?#T+_CQtMzI-h`blSry?{}T?xcfc zG2^H6GId?)Tg&%aT}>NxqCE5?qEUC5@Z(b%-_=ff7ek7bBbZIO?zz(?VO%E&pu4Ev z5Tz?N6yF|Vs*Ubu)ncXZsI|Fmcsj?1^Mif`h6u1E*H_U*sA$;1J%;+4xz*eddkDfP5>tg z+u&7nl$fJ#FUTRY-Qpqqt-uxw%sNj9PL(s`w9!p@4os6VEyVrK-kpEMP~79xj#WBd zF;9-)-H%RX2q8l|!dfAxj0!XeF*z}})#(Mw9QL^AvPHS4Yl5Fe+vR5U$|Uwuc&*B} zlx}`PpH>5p;$6Vb02t+!3>~}Sye&s*ne`=*tyf8e^~fta{R<%+v1T(mBaZLQEfDyj z2eFqCln~T4EhNkr`?wU)#HN9_?PpF zQZ@V|nqCfd-r#|6L zov8D0vR_V5>2@`5vL;_Z7!>K;^CDEi<5oVt3}*Y1LlW^xJ4SOV$*qmaXgix;v)^66 zO8*J7S_gdr$cT0WdB>)KI_V#pg-7i`Agr3x5&mwhwQ&U?OTePgZ)Z9G9tg$s*k5ly zN&&Ni&!t2_@Yt>}QyP73KK)Z-Ks_KX4%;2Y958rTsi$} zFE-V4HZGIj6l32y$55OoR2wxP;9P&*=-@8MT}AU+^Cs)nX?zfJNh(DYc}%bzoy07F zH@S9H74riysi@P)8!njGF++u9-?{h#ik?ZL1#R|(ifhxZ?U+jBqN*!oX2`m~@_ELR zFUYhsRkLfEYYqmx=JphGw^oKDcPuS|p0Q3f5`=)vDxLzoRvsBJx!V7Pp zrrrd=FO1jAPt}GbJ0T1xIV-Cxzt%$fp~C&LBwSWp*Y{=Eapu_ahj}28p*Sfqlx>ya z^GCIj<4_?#Xr?lqVXKmbZ7BdIx-b{O^_#QOqdSQJp9`v4enyR0V_-f8QFW%gheB4! 
zgIH1J7Xr!2Rbw=TE~HLxCYjb=Ikjk*p}+xHsDF?G(!6#rooM?{Q4Qx({{r-rZ#Z!w zqg1oiu8VNqWun1Nu4DqrXq2mw47SAhoy&6X7B;lv4pMh8`z_tD%z_cq(TE)$H?*-B zi;*D1O|d=DCr#)A=RNP?^Duu4zeP@DxD&4hFa;Xfwb;<{T+t;nYCIf4A>(9hYp+^GO@5Mv1LQ>vTiWl?d4#@YxKc7!KzZx4^$iti%q zrxZ7WK@=*RNzCN@N)O}TI@pi$da@P#>UIl0O+RW_Qj&IN(8+{K-Bwt2B*tsG)p~Qj zTW;Z*Ibhnz-Wzps-c)wxXzj*40ilTAm0H6Re zPE7zyK{O3=SaiD>v}mStx~eHDG(MnhvIGQkHTLa)8UVXhsgEyHE*vN(8o^u|@)7$| zG6*o3NTy3!i~?qP*|J`tAP~S;(~yRA%+>UVzF?JT{E-SnAz6)092lvQ2m6Qn^K#(5 z6t8vo=89IwR+qAzjNe~w&6igmC6vMd&1KNC;+>>1zit)q*(=(fcTHK#Vc%u)S2tC3D*UvUk^+11wk36xS;>oL%FmHdYMC=B zwy*t?t|+B4nL%6wXRZpHZx(3f0{S;*$)jWWQDO1>_FdZs{LyL&X2+5am+vt+0D*%3 zy%f1LCrkHE&Lim4_fA*=TwPUF_cYXO(0llPI~%-c5<=?-__#25EOQ8(pM4HBoRObI zsrjV=o3%Qy5L1P}N|udzM~gr1=^_}jg#eKkN7~ra>x3>rOZbW?!|pB%1A5mLrxMr3 z&^YeRz;Tol0%lh@MoYfs^F)i0UW~?SCb)YMz)255Rji{10iPDskmJzgB7rFYZ*K7; z-GiG>2sxMAa}4}#^f7i~!^Yt8r^V-Z-EB!fev5|3$zXtzxF}GId0j9vhl;QEoYtFt zZYo|5mWzI-(A8A5ZJy{gubZB_^oIsTj|Wr8v4b;JPcqreb2w?M!bz2t0vQ3e!xH(` zg<4wu(pXA7fMV&S8G`f)4Yv$A%aakN2}1n*njB7?s@28TTlFgA`nhS@f?o7N-fD_5 zF~M9OV$qD%W`H1)Axb=ZGy)T0Nv~K@`qOswzvR8ot{zdkv~G%u8d~Nqfj{ys>9Q5{ zY0~C_ZuF!Q`vl~mEUdUeyM^g551@SCx$AkQ{9ybWyBYVAVJ;k1naAtx1Brc5ZmoO+XYDefFfV@w>MDT>?E~F`)W8AKX5o@bF)fTM_+2PJr;DlEf zwAi=Wy_m|s-=nFhW6b7Z^K~CtPYD<2W0QbCQ~OF!m%VH0ztzeRZnzcFs|DS~DqS_t zx(wbqUPP}^8@s%3%k@B?2Cf*GLg#}SE{Fu;1EF@&?YKxVrhjMEey2Sb>U{#7M4KHP zQTA^DNT(KvX2+RG|Ex+`HJ|Wd6m0ta^B$N>{$tmi{vSW?+rO%7ILYC67D%8pORo2+ z%P%IPjV=#H@djKMvVQv!m2a$Jh_0M}FH`*vGdmw6XXh#TUyCQ|dkvYZ|Fd|qVBTHg zv?M1ExEfUJvR+vGj&}M5W{Cew9f&f|+1aJ*tBxvWhc~j4!*YykN@2?rWn|5U{$M&0 z{D|wGGr?{tI{(TT+blu*sz0;U^Hv9Dv)Y8As#>SHNzfXKId?{XjAKLOHyTc(*Q=@U z9So6i<;B6}#rausXf4dj@WU;or);-BNX_-9TIw6x8i9xrK2~EpWAJOvjhESuG$qE9j$Us_8exq;Mha_X}x;=suYo=6g zYX*y~e-hh!M8~?7$T>9@JEWurmCF7yDR~#Z+sd_VHc->7R-7}VxUbKqH(fRdjBZN>2ib=y5{~hlG{4VG|tW&2c!w~asS(*DRU=e{0 zb!u!H)3W4rDO>5V7(BygUbh7nd&yve4eGp%#Wjk(Z~uZ6`)wKI-byv-hFw5n0yszx zUq64JB$fN0aMRzEZa6fHq2gZbhp@b<^|vx^+N*`aGREd=DRlV3hTn%RJbL3ZQn(LLv6> 
z=@9W{4jIjrxTSz`&-olK!-O$qexdwU$O^0{0&GZ!o-s~U{;rBFS(F#+D=9>p*V<$^ zMu6(TWKD*=GANh0HsMd^@gdysLr)0uA_ob=ODT(2s~rqgxl8S-8R7vGg3tB46Uz@P z(V*Nw0Ur<8{ezCod{Y>ET?Cxk$=mMR&%}1$F}wV_BV{M@Xl4bxFpB`#?>?z?OwO|`J_s#ISy0wn5QVooYl@Bpl2?tv#G|^NxjOa5 z-gk%BFsmLq2Y4J_-;E#)OO;sKt}v^f8l}nx9K4VbS^d#@5o(^?g?Eu!7aP5aPOYO| z1NQ5di)-ffLybqlFCgf1e~mbG-~(29fW@@kP(b zm+xblgl!u(x_JHY?l^zAD8Ba7t@xEz#1n-rj9x@zvJ`~9j8$Up-SAOI^ZA#!VcPD{nlwz0St>1Btp z==i)UBiwvibK2SRKMm~V*%%*1mae>(V1LY<6%JCv1naVT1vGkxMD5HZS|)9Wb-plC zJ{&fPv>E_OdFyt*UPjlVm)58-Zh|0=^!Jc2Jl0kCK;>;f_~GYD>jGB&^5?4Ey9P)` z>U}FXg)2}`PRZUSZp6!!S1760IR{?2Eu3fw+rbx&I?HE!&W5j2X=TxI<<~i_7~j+= zq)d@%S#>U}1E&E0Q0WDb7J~qZ6ZAuOItSb%(dKm77s5pL`Dsa8?$9Fv%u!+NO#fr; zqb}5>d92JDv@5L4BKGeGKLc~u4(4E65K|$?goGc*(*_iSiOHA1Ed$|fwNW#Vh*!7B zABKv6`gP|>Rz?7~vO~}3d$D=IBo1u6V5Ngo0kKB9Gq#yL>vdoeL=$Jo=>(S5i0KG% zPhA56+hlXaWQNx##UNxqkk7(k;}ELl6Cb?}XrOLDx8*+4`<-kF6ppe(jGRc9dS?9` zjrF}eR0s(Wa{9y|vFF~v00w8I?!(Ae(IeIG|Ha;W2V%Xy|KlYhg=7_psBB6`_FXbE zLbf|8d)*|f2;FUDlvQM9?|oYtB^24)Z4}und-J{Sb57@+&b!3$Mv{`tksPjp}^OkinQblfMsrn0}{8+nr-#SuljeYqJMlNT( zBrVlS9#v|rl7H$*cH-KPwXVJ`s9~D#IHJW+*YMTaD3~5Vp&AbPfaiC_w)f8WyQZcp zUkW=UC>;3yJzKr|K4yj(!vl?;BUmj4%dK8>n+5kPkW3Fa* zrN7ioQ#AftgJuw}AjwH9J`}h>t(&7s9_#1e%NfvI8y@rGct~zrqMEd(uG0YNSL*Kt zegbCt?LeFi^H1lXI|r_%A%7HKP-{>>3kB`$SmjC`+)TPEjZ3eKERK3FAI(tgoxE8! 
zKrt%BIoh1W;nDU|Va4HzdGTer+31O=7kRhWDzOU_9%lM z#K*^vM`+xlz2h8|9S$1XON!D4>QYktt?Gh!H^hJ0YPwZ^bAnbR=d+A8OnD&tv{ znrGF4*;9FwqO^z%(CJ!Iw~8F6%kOd$oE901m<-Ib_#`GnX@fX4kl+NK&GuSRO0#M8AVXBCJ7zZKeWE@uhIE6Q1V zN`=sNGM^C+Bp;9K)x^XNoOpES#*;a=m>90zS5%$90qnzVp-Gf=|MvT7hGiep3$as= zH4nvC9@eLC1%!FmHIL23wNWUvp8NWc>S|C>d@>C|&>jx5=0o?P`1(d`Qsd^(-Xo~}w2w5Mvnf8rhrFo43WD2flM9|(zE#I% zn%fHZ1c*CC6r~n7#Al221>(J?TBn<0sww4;Qr)2b%-Xqs2RvBhb#b#wA`5cU!WCP0%t5uv?j+{;`b6T_lF=jjYzp zZV!WQJYhB>g6?DFdV}hn&WEe6k(}SI>2+rzN(n3}HVm&%Y+JbvPnZ>XENtXxo0iU% zU0dH?W{TNXTQvw;T*!NJN+LEbPF-Ol;eFbZ26>M}9rT8)oIPLJ!^=qD6!)HONfBdk z;*$^OvNh&`q^@ATNMZ)=UFDL!?ypnkMhRBly^E3_!0 z2&>;5pI_9H%bsXJZyu8rtoG_nWNx9~Ldv%Y|9sOr`eIp32!>s|9l80L{en^Vi)MAYeAS+upRntudk z9LurFeO_j%SL$wYOJvkO+6{#!Uft~2vSNjTjr@J)SmQv}y)P(e4oN_zYv0QUjD|~- zUFm5FP8@z3E3u1vdSSybmWIq`SKMRP&bEnPYSS#U2!X+b<(0363WBL}pLDuGHZ^&3 zj`fIyW5c3D(uq}Fc3_3Y^+{*Mdan>gQBk51hsL&D907I-kElC zfq32sOJwdLqeq4`3r-!=C6Acx?vj}cekr2$;F9!4eSb{DP};h6^5lIN`W{H@b9>%} zEI^*AX+CRlS0-s+F6LVnUxL(_0#>u#sqGBFd^+iaRT8_oPhXJTzd451ahZE1LBX9F zvfVyFMx(v@?(y2_P0mp}8OJSNkq*o!bDq+20EtK#6aBuh?Y!R_H_DbGf~pWRchorCHDMLTx(Hyjns*8Jl*DE7ya!XP}|y zn7-+J>ea&R7iW36=s#-d6U$h&x7Zpdjo0_9p`3MTq=>!R9LmPB?%${o(qvC=KOJN6 zaG=tgn4EG5o7%TWja*R}PeP5kf#k&b&Uj<_lU5nq-;1QT?JFWlvAHjdEypxmxpf~- zKn!a;(a{Yswdl1;rGc=us2-5@!TA3w6VNM_V0@JDfe-JIE5%OQV{IY*v-? zhhAD%uc+9lTZG34hR3Y0>zM?h4ur|FM~^{s{G!@RwPGjKveqLTtsYaxs}(hV*2SDY zsUK>DPQ^#F-6kTBb@rf?&zPJCf6=GDZzhwb&b&*SQE$hDL(79BL{?6iil;qI+2qtQ>s zcRSiBNwBH-6m0ljyhHI&f=&ouYWR*O^S<{&P3zGeZx+3^Y;<)2qbe*~673;`>epDD zOwY;44tYm)E$0T0nL8O1u9jS%)MW){mpyHk8|d6itr~du z#|2yK3}7Zk+QX5W>u(sjb6cw36OkgHn9~a%a~@PjHAe=~C~tEw&T1F7-syE8n4-m? z`m(x|`v=7&Afo| z&plihh=R%#zhQ#bU{1R2bJw-Vn(t71?I?PRQF{-d#CA)6j11ZS3gBxDF(1zj%;`1^ zYSO@Ph(eRxQVg?4_nF+yS5J7i6Q8un*o&SyDjcJKVn&iwNtVtALLziYFt7eN?+{Q! 
z-Ki7o&}ojhDT*rUYJEC9NA%dyHphkHA>%_Id_K$X5%9xSs$8!a{l~?UWfPF?&dqlv zztnKsTCuZ=jK?N3=nzfpVpwCt)a~gTBYGYu?{rW1k#D=NycL|EjJc-lQmBz(T(hOg zWE*xf>=t*2*~Z4~*^sI2L*k`*hC)6hK7j-rOG%ff(0ojp`GN{Z?brJ5 z5P`(X6yVLt4n%C`2Os7*e}GIIV3q9!c1Dacl&z2%o4N_L|IZHmTY8V8S+S~3!oHGg zqW}(0+=iH5TAc-@oz)wADS-P@GcD0+91oM&2N*b!IL9wBt>|~Rh1MsEu*oepvPelU zps|Dz(p=__A%@4uN`dMfD--gmcW(_N3{J4X>Pr*pcqyXzy?DaEy^-;Dh}1F5tBCM+ z%pBm6Nck*e+MOvVpZ0a8`RyJ1Wi5-WNbclQlQ7T3nJL`9UIZ;r%XIu2s`oG?moN-d zkg!^)CBqHID1#0DfMZ?RonZ6Pb*9&#iD7iIPcK>6)yXBxHVS!Q8F8Jk53lY=LT8SU z{sRWz;&*&DEwmE;N`BYEicQn?kGG}hidjoPpoVcsZn5>a3_Z>D)%hVN2idhj-yS*O zKj>pMkQnfb!l6A%9-VR-J7TMKJRzTcBq<}(Wg;Pe$?fHq^u)DeYO^FBiUi#XP~2RT z3!otM-=jyCN!&Elqol#whCv~HGBVb2y@PH2>@qF++m=s`X>fQWZB@@-0NNc5k#Cj> z=|$tojmDRYx!emDniYG`0WHTg%yQ*Fw%9tKcp;#$u|g(CrdG`z(*ON4t~dvfSLX_C zqUL&!tXuNJ1!)fV%unq1UEqIf`UpEu_kh31eBt1sx}%1pgKzlr2Z1&^tww|h9F$aD zAgsEd2)){yKmek7-Do!U@>l4_ao!C2Q1B$zf<{i}rOhBf=)Um{CJ`4Fo>nO|g}$gN zU&a(pfs(){V@a+FuW!{T3-`w6NuY*t14(8V4;Mn7)k<=%$*7|5_8^&0)NR0H*IG7| zw8V7?0-eYr@QS?eJL^Ouh+|{u^M#1tkE^9Fz5`f71TU3^WtYR;tatj7k1k<$#dxhQ zw4?F`FKgz-he}VSGw4vpoMM$#bAVwT`dW{kLVfdlbwi;OG-n(wcJ~CzWL{}Om_(e0 zs{$=Vg%;K-XzopLMUd##XbwdKdQ`iWzPlgu(LhDX!FEwB?uuf7Lw?aZd&=@XAC(;x z0_myROVGEnF*4z9vvp-&s(t%;(`(d_>~@x=`$+Q1Cpt%Y~OI*P5vkpw38x2@67uwLt2cQ~u^j zT~fsb(qctx2FEA>;`aZh1CB3>}-7_hmOjlYXFP1*R97*^}48@ z^-5CnWdhN08eU7tX)AHX*b~z-m+lk2Cr7$0WeHug&mPZ^_IO$s@qRwje(TX0~XQeE| za3bch|K?owvm%+kYnj*22elMtU%25k(av+hqfVjBrpB#Ju1tNpgY$yNb_}5U&z}!h zq}nV}DsF2NdF)y)MTo-Zs@U#7p==VyD-B@>?UMfO29D*2*Iv-JA(bX1rizSO%8Zwp zQsVg?SRw>jAs$*a0j%E!pGF!6YG-N6K3)>_m^f zw$cV(0od2T@$?oiAduCpzsx24mhAuv4fUjel#f}^259`zA8<2D$g5j7n39^ZjO2)< z1T9-LeMpZ7-Y%qPb~S(P78>_BW!CGhHxEJ5`ZzGwsIvn`=bNTmx$ptfIP7(?y=%}@uJB*iB=5??Cqdxbn~54vDl4aM191haAoxL9CF&JeWR zbqHvftU&)j=iLun-!jK603%=npi2qP&!Q>OtLz&sjthMG6H!OT3vBppy!Uo|-ZL8; zoTpI|IuX_RNpK^wej74YYJ?NE&fYBDw3?H;VllEqH9=FX$~r1J>KtJxJF?yiy)rea z%_*VV*VPn~iW1hw&OdP9PQ^H~a>yRoxXT-q7d&o?PAJNc$-o*elL=W`|Q7N@%R>8P4Xcm2hb 
zHwBEv&WQ@R6E%DtK%bwak-d9?97R#V#N#dnx!#-nO3q98_;(cSGm?>9B2)* zh{{XCH}520ycBf*MTiQN#;GMAk);@l`CqswaaCE7n)BL~!!f5Xg>cWQ7Ew{3DO&vv zdz44psmMAZ;<4uX#)3P9H?G-usL5)<3WrS&qFkU5cx{tF`jYseDMnuX% zPSyQGD$HT1lQ(X7kFXqI7z&SizGujevP!93t|+8uiaNw-Y}I2;jCM%1h+&gcp!Wo% zk9}|TALdmJWJTr^9)0hN4>vUzfiNEpAg(_HaUxCe#dO-5Jt1iIr?QALw9A{9TFI^I zrLdoI2QMJE*88`M7WW421|!HpFGs;(-{9zNsfiqQs7VE}--sM$wb@xO6gT1Pbq(0y`?>|)7BA230M+k z{gdZiWHn?*%9kFLGV);D(;vYfkA(HN?qhFw4<7aC=TwBX#}5^!&NY52En_i}vtF6^z_# zIL>w|)A@L5XH}bT@?wh>@H^2*1KK9bCt&Pa`geH)&75?-cr9Y-CqVJE2`(knwE7C{ zVqSOxMpziiz%V(CxT4VB7tcpSTJc2Ey=3#NowO!-N(T}OufHRh3JMKPezCXY5XRgP z@{FFcbclBmEcp+cKHQ|~I=P!5dA*5iH}Swh{I_KGciSCgTNuc;ElUGl5D9O^7a3{3YXThwCdFF~LGRgoKgrMa&ecfjK_-`qvC zls5d*pqBPnxyXqxjjL2Q50In(o;Lm8Cm|xJnXPLBLV^N*Md29768q)H7jdH3G zs>@T2zOYv4w!U<+KFnxGRYAi1h48BC(1jGjFk>!jb@9Zp_}JLd5y<0Hi93=%`Nw1a z@y~m|{gjLD`pIV}kvGD^OaR3qFa~LHRBw^sjrn23|Ei3i?jVn#47$%*sY#l4h~h$S6>V*1TWa?a*`Ab!pO5 zxqqs_b=gL6!5;ia3kwVSWKiSN#-!Bj2eBU~AN}+W|5GGF5Td~{jftT2B(4ae#^Z@j z05t2Rl!yBx)LIhHKE+-`3ao%)rSOFm3b9SzJq`To^B<&N94i(vMUYGOP;u1}}% zrLt5n*R?@2(aW$^jEJ$&{n%DjyD+cC1%Qi$$J)FaZ~4Vg&A+rMf_d-mla??sTjM+u zh_x46h^*7Im1D+G2aKvPikSC8j8{DMMr%=is0;u4t?>lBYW8Q4U(DjytFoluS^Jv2 zi=q!MvQWy)5#j%1qBdSj(4?|gthoj`O3D`|Td;>%(E z{_}5~yWeB3I*PnJu%nP7`1b(@y`{}CednhZD%{KD(bgwF^{aX0e+i4f5k{_wdv2 zc+htdP!d8P3EkJ0*IVp1H^RKTnwy&e8}b&~4I$Y|^>%2X(&+Av=Kc${YPnXy&95vn zUetWo#7~}-AtC0xRZABl|CuZsqB@A$kG<9o%)y5a{N2Bne|wJ~!@qt_bYb@=?7%?` zheeAO6oCNz+h!5SgVZ`dfEJDXU~s|r>Ho^W-&Vl?`tC>SqHlur9D=R`4m%C%`R_3w zMo?X3*d-#At42$_HkyElQzOxT#a{k<<>1Z#Ib?t7O8BerB0?8hZ&IM+X=og~REgT> z_7J%>i75E2+F+sH9B&txTiDd&Jzuk_&?XlB){js& zMM#XbnskcXctX7#Tlr1|4mc}&=u}jc0%Z_5hHNO;maLZ)V@^F#=LFWUIHEILn#m{l zd_L~SlR^7or7x1Q=j&w|$^cd(MJp{-YP-)c=i!i6SF*MJ9jq^-*dfo#tbfKlfAe#2 zR+xn5X0+z#Ak*kQvN<>(lRRo{?W_~(Uzg$ZZtd#$ib7Udsuz_hDwyEstqyt5aJ40Y zu$myQYOBl7cD7#k5W$4Oe1JxWv@`Je)%0E62#%vPoOqp9BP7M(@b52hd$)H@PxKV( zvT!z_dIP|-PMXJ;xB0PNANH>lYO-Lv^klDC$&csVo;9p10FTl0wQ%*$0XCwX5Nhwy 
zPQX$h{M>=-Qn#E)_fw<^(zpJ_yMOU;K0YF_+mAg|Hg`(y;-7BMKQ`g_f0pk%ns}7g z<_2;=yYJi6ou!%N96FW)8+hav&%ZqW95cZsCJf_2!9$We9esGS6E~yYWA2_|LW)iI zE$cxo%G3bDjXjR!N9lGH!!OKqI)%652(PHC&(F9fUFJT+Z0BlorR`xn{!D1lp= ztB;OK{mVzlzb!|5^$*ZuWT<>!qiC#!ri+#*s(%p$j zf%#tfZt8QYH;|Iy_SV(yzQxNWw0zf6DM9fZ#Gd=t06>Tcx=!LK|6H7~J+g(aJrP~B zL$jlgchrB7uc5?w-kjf7@bwY>t}Xqm34wMj9EdnMj@35bnd;0MglYD#L>(=Imukk< zQn%3xGyfmf4*izx^sR3np>D{1wLtX_a=|Pg;w9)aM!{Is?$VGs%;>UD+6bByJFxT_ z1D1wu?kHFo94{KT!`i}FZ7@$uG=y?GKfL0 z3udUiY))Fe5EW#RrK2utKRt@+O;+Ap8kvKs6c0nZ_8N{U5OjLo(>T8SO5gNNQL zlA!(P@7nU_j0~H;P%@qFv7dR>uxd2jH!_`LAnCgGQn0^d`8eXjD{J%E{Kh85k<8G2^7m(v9Ay6IAFcZi z&|7%(Q$AX)cP>@eK4-E_YvE1c++$Zw;jvPokuBiqM2eA#o$}in^>bg!iEJC=)5(mo zp+e@B6VYAGgp_>I`?LTb9v7&oc}y(Gsr=#@e>_zaT_V$+++SsJ|3MXYiP9oPQm5uN zvf@6f%{khquOVE_d8@a7vll=m5$?4{33{(-j?4@ce|w?G3nYBN0$>6OjkBho<%w8k z3me>cmkzWqB`;yS*B`}J`!V%j+4QWZ9*EK>ApG?kL^%>3yZil}!NvexQ>5Efj=o@) z{p&utnpW%YDBQn8)h!`mz%-_LeRL|(y)E-P@~tg5x9rS_AJATj&AdIU`$5FNf2}>g zZ`RitIy-Pq{%7TX?1e1L&Ck$w3J7yYwPzAJZMTtvML@0Ro8hrJ0j^YUj!thl$+S|afQ5h| zizAsJkL!0csr9ZK!nE_$9cLTFsRWM#7_TDTtc;=xgK6;CnpcrrHO@2E*6sk>H*skG zkYIXNC^8lsz*0{8wPiftZsl82LIizu0Wchl_d}0Kim|}}AhkIL z999R3TwGk<%t@rA^?AO-u`E*7`^?!9e1rK1!1s7qxVX4DCb=l#O)tr!jIgMfSW17@ zv10QH7|Y{hW<-3g=b1Wh%$_!46_KI9(Kcvmf=Yz8b`Up6J)S*!MX;Za_gO&|9O~s> zbLywxl`jsLF6CvAooQa;Dx2e4Pq**v2dY?}h;Z>cw;o6# z*9-aVx{-&_*izYEP)WKeNNJMn>0Gwn3rEvw<4`M&KmIc(pv?S+C##qPlQC6X$^Jbk zN`MhvqrvX@E?wb>p1fS*J|5;j_@)mzwjY*YZ`T zJl%{@%3iGDGV<+}=}@cmF6^M=vd3IN;Ly9}k>+%TGDsMTdzqV;w$H%@aQv<=I)V)V(z!m~lt>Vd-8TtrDo@+%Jk@wJjV9 z1{W|^4h)AHorSl&j(8BM1{I}067Z|Nb)%Q8BN$v_d5&e++)7Q63&yF&UkXX%UG&p@ zvY}1X`m~NGE9%U5H<{?AsVmE8zjHD$0-X>f663TpOZtKR}z@;x>?JlrbCgrR5H0aO&_=ae+L zP`vN28dz?-HEip9qL21DM!RHOlk(t^sjY;UiUCw)`!SW#-Zup3r3U@0)~Ad43MZkJ z?RHu$eX?f@FwIbGf2SDCtWM;)k1Sg+E?W=Ie;bEU$_wf=bAlGQgmsU3VUKxuchOQW zE<^WOm-45Pakhh-H|@&C{cW_UY;RXEu=qPw9Wi@53)W>stp5mK@Wk|Vdhab^&=OC- zDZf5qH$xp4qi&S{^)nne%lee)M+iMwev1uZdUPX;cE!o5#U>g{njLs0-Nx;k< 
zHR+3KkM$c$nVwxoK1{j>byv%*bI0bOK8amsec;Xz_SujLxtP*vj5|tmnHA(IJgXb? z4clLcr4sutr9ER1<|Th44&5{Z`Cse6mQ7q+E-K1T>V?LbEVA^qR;Vu;Od&54W;dKlM+0_e1NF7y08(E#N2bpG$8Ws|G%UtJvV>l*WRbRh%VHNlTUh|vO93<9U zcFly^033NC_el5ET7&c+$Eo~?oO;m|-eyd{95voWB*_g#OxmtbgT{ci&b#a-mbYS2 z?E3U^{A6OX{7V)xbaI1cuk#MSYZlmgS~Qg(^z;dMXUF@ZXr%!E`Kf!Z&4nBodC$9s z=_RcT8RFcrmG^X|--$iG6VJGG6!&d!EpLxSZjFJzNSB4_mK%>qcBc{h%5qg3`=r}p zYA|m(Zr`74@y|cfPTd4>c$+NBvf^mo*wfrftHOcHXV(p|Xw!IxOvTjh-cDaS&!&|l z?=YpMr4MgGPws2r2(v31laI`qHIF4}FpC~69<`k5h*!^&Z|w`&VaK8nAFwQmvpJEQsX|q1Vd=ds z?Vq+3sPe~#5=t3qm{0$^5d^&%sas~RXI;J4@M!^NKT4fH7#sr=@l{Ynj)6QQA# z$I*5jOWM_s%}i>E5SRSl2!(^YL{QqQiffy%}?E7$sj%#i>O&;ji2 zg=e26K}#i{Y^giH{;CwX4^7-xDwuF-7Z*12htFbI4VH^KWQ5ud&IF{mo$k#j5iysF z`KDSkIi#YPVa)Xppy^AIqeLdQ)C7*e;Q!v+bh*l$7a-0c;>VG+)}~K!ARxx zxA$elPn7z&`Q~MK7Y~P8*%W+z%!9;yxD+%yUI4SOaGQAfi4t%UOKm#5jpjM@pgnjx z!PcxUG)ND($g?exTMp0>tmdKrNn(AB9RCv15E^?rYuy{HH5z8Sah{!Kq2F3AJfCD_Ee0&IC(s&rA8qU0d(&m>%?Ta1@+OGu=VT z>L{N+*o!>w{(OL6`Le(*GbGi~{`rXIyW{GS^j=MJl5@{H+iCTar!=P?ks4C=;*c(!{n1@jWdtUb@CckzP2gCG076x zrWXkhf+;f}X=<_+02 zuPS?Ea_utH0?UZ^%HUOM%%=~a-`q!m1TZM?!jz#w9TJRowGAkGiv0;d+_g-AEvtvEgI6-fm4Hcs)(h#afimu+S!vBX&%C%3A!oL6 z0GZTC)}^pyn1L^|6E z5=`FhD3fb@F7N2dSu$*w(-&pe`}oh{J%sCi5=`Fm#u(&s@QDoqO>iGYG+>3_Pe(+R z5W)7=&WoS;w>5Zk!3pQOBZvR>9I$hCYI*vTWlY_^Ig)OLo4ip02Jf-9U&1d8*q>+Q znd)`mz*DREb0r?qkk8IOp0Z{zboHYA=V|!co!EU4W-+o?d}xJ|g%kr*`$RmV7kgRoiv@deIgpX>Kk_CV^(fOSaECh{fK4!YDPy$L zCtO5irX6(h!Jj`N2|N#%9LO<0fN!(m4IYt@U})Z_tKk)hU1)x|B&WAi^!U>i2iYIMB}Yt~4*yNGetgT1&m^ThXwCIdKW2wLFgXa9 zRB+Sp+2MtcgGBr%G{0KJ|AgjOtAZ0h#D7AAPapkX3XN#(>8DN%72uz00|O2eh9Iv3 z9LN&2*>~oLO9mNd&H)wI#N%vW`2vVIql2oO(gI9$fkH*UxZS{qqPc8*8w?7$j{2ar zRs0ec3IeXlcBE|@>h@x~6_mpxS3a<@ECrY1_~gqrv1M`Ar~$|)i46s+Th7Si&tj1X z=z`Vw^d)E*UYL$D4=7MpAw4xQHdY86Pqsy4@!}0kP%mKG9F=1WcepaHZZ=X*=;-Je zw+=j!hPJl0H3~m;-!If6HOuLPwH&dlSzV(r@nsg5V4k`z2D7aO4}XFbt;ar}b4<5X z0@zm{@!gHi$gpk5=)oV!9%NUAcipw|fI_k$y$$GW;$6&aGj3q#^H7e0&t#o|H$6rz zO%$K&i^kA?f0NQZCYC6~svn4Wo1NB;V=j|2XR>VYKbm_lY^Qan%gjkz9bV+Y6&?*B 
zMjV)iaZV~gbhDP>JOnJE?Jz;C3LVe_>}R><<~`IQ$H+7t@fvHBLtp|-g`m~&D{h=d zw>#tsQf4f~ptJzf6?=-PPB~9@Qku+(- zNbVfQnnBPj^Jl-%m912o9^()(59z`0PD4Tx+@=^dU^w(Xt#Fmn$>MS~`PQVFl-Oi2 zOCRm;r5+{VHSAn`_Kw)dS2{HzVdN>R@9lMLe_vahBcyXY8+#da^HY7 zX&EFqpEeV%L-Y5sGF~J9dVhRkvYWt_R72(9YbdOI&`s`$Fp(YfH98?Dy?Adzd@8qP z;KRq;lh6{JTAF~e)qD*}$hHhzL9HanZik>aN(2mgon4;?*OQ~SPcw!_M*YIBeeEv z=Ro(j%JRuyoa`TOOiDS*9ov@GV;(UA1#v7iR5L4uiwa)$yE~axoy<&WDu&B31i5{J zJo{=r@cW6{^c3jkn93;ehM!(t$}ii5G_q3Vi5(vSr{$g-u+y|%M=qM>5mSnbtxx78 zD2Hibqd~`UXY-XTG_NgAq%0Q~6-lnVqbMCywoT?arHo&sRi7eg8PXni+!Qac@5qCq zf8q`B=qpmptR-o%tvJ3$(9fN97)Wcg`ks5k`&rB64lpq=^*dG`57;dUoKi5v?5)!T zwzKuDCEk@sgyUp+upS~7mX$-=Edv}*m^S5$0ofA8>6=1MKrmh{R8X*lyn^2q+=U+A zf3$VuRGvKkk3mEo1@RhsDHrokcl2+o%8tM(PY2~4)JLE+Tyk{YoO%aMNQ8?Z?xo|; zxa>mT!bK29$9#4OPNQ}~ctFr7xI>_b-J_xSRog-KCvb_#a1;A4 zzU9YflH$UHUHSn#?7u zk2*g-;-}39j}-kFm&G~$j5CRHrLXQJprWgsD%)1=kyd*-@W)Gu5Ol3dnCBu4Y-6mr zOG_12Zg$mGE=yD3eGDzt&1c5m%JTb)?8j}tF+@(TjBPUVy=`2I=_)+yI$NnNn`M2c zdThpKo$|=fcI|Kb(WnYh+T&ZvsWGe*5sh|Bx~syaU9%t8oR&X2zwMcAl-7-?Mi|;7 zOAX_M^YhkUk4xc+S&vn0FU36_pjPLy70s zwfv&^{qV-+L%5Ps!&UlEqx)KIZICXTqj_ViKO;-4bbNDdL})M>@FPJV)76u^&t;Z=fP(KF&ZVSm&z zo1AD8t!*YIkB*kqq4v;T!SgoPxk*VdK9#z{!_Q$)Tbc)7)uBG&-PC87K&;P*Ozo8* zK&K{{<=3J5i|-Hy-N9SMHMfNyEJa9_@Lh~@vPV`YW9LUOw9RDr3$~A-I!_}>B--ds zkQau{Mqx#RyL#H)0}`w8d_fehzC(6H<5MTqOL2?qpKF7$Jwj2(@!?piUO?VZClqCb zdX11Yv5YWjTeG&YdYuxAM*?4kFb2yH^S}%8W zbH-yEmr8U{e{&Gda(mWl5_dz)qIFCB-O8HDztq4+E-{R`9P&2 z4y6zrZfs+gY1=N32~d}}#v1{5Sq5N#nI#yDX(Id>zwJ6gN}ri?I*O;1ms6V1k8iLML&-~tGtH1f6`tcPJeRx^rpC%?b{FimYe zac43lz+<+v#om;r?Z^~fJ^x2-4}pXd(A8ds4jfz@H+(ZABSVi}C|70$1+!0d9KYPR3Ro1Bzo0Rd%gV%W;baA`}C+#FN!fPRV9#wU^jMpe%e*$r(y7PUQS#QKth zs`~OEIZoD%AnY9Q${-h;%sjuppzALBt(k(Q0CFDT-p|GNRZ$qPDMnubRZMd>ElUuG zqLDjl_W=nlMNo>s%^_*>Bgup&4wE)k7FJ|$%Mx@XLG{$F@!B!RR<)S?G*lyPZ%-Ja zUE(-YcD`UugehX<@MyGM=s9>$-iMwD;P`Iaa{`ALKgYzsMqMoryfCK^!2qpPS7D^_ zpy}O3n5~p|amLXw1kr~LJCR-N@mebD41R9{osGM5&F8AH&Np1Rfq8(`Q=e){)y-j{ zWavExjY!?s()EimF3*f7ty4<)^izM+5*b_=DLpoPS;tVwX15c5+Z3LCzn@PWda2aX 
z0dkv=kg!(1kL~rr1ZCMZ97t82hSC)#z0^=Xvb*;!W3xLWZv9Xiv;VTv<=&4r#TCPt z*yH%lRt+a4!^=Oz42KOE^3w@~uz6EB=&lvkPz7lW-A@#vqd}KmowFN&I@4icT;Vs& zrYK2orQ0gQCmi)0sF?@ZltVR2#>~7)j3p#i(^p2hHnR%TWKPKn#ZVAzn>6yjkQfX-jvMk2=>pb4oEdY53{-Wv4etZprrF(t@J zLak>OWqnT!dUvjz@ocC3=$c?scK2;}WX&)}Y6WqWuS>MIg$kbzjLIT#!U(q8%jeZa zNEGYwUa;Y4c7j1V#o9vhEB6*y#-gTs3US?WgDKrFnt51M%L-At zL!VXX@cAvOT4-(|Ju+MC*)R3_9k?lZkD*X|AR{7cRpVm)uPRB^huAIw>h+EB+oma; zPF}|l<^16`(@yG86$RRM-BR5i#%wf7ipLUvm476Jd+|%DP#xB|J|2k1)&`F>pcp?Y;I#%Pab@i$V|s;J67b1~b+x80gJ-5w zm~2uM&x-Fc2v+bZ>64Op2{h5E^Iv>>h4Y`|bNL7rI6#1FECdq=h@7xJ7z_1MLz7vz z<~?{fzX^OMA6iW&v_3&l9V&p0;`&wB3WS~)9y5KA-5b0SUDM`sdMeBrBA57c3ey%( z$@}4x?T_&8vNnl(2_Eb>*4JN+_1x3n)gyEruT!RggS@@)6nNH7{n6D`y_z!&JB(CZ`ep1YgAt9ZWfD{VHOV1kJzy!!TwNa1pGZZHiev8fHve z)@#JSG#y25eW>zfi#%FK+xnxxYaWYO;>xfzb{zi+kBO$G0WCFz5cISQifO^jL~KTfkIG1$l5Ki-Ip=Hp1i#7ZmM0LA;q?aBN3b+YMq<0|-o1cK!Ja$vKROT*iX_I){Q(9!xhIR> z{HzBW&C#JAo5Dqwiv~z6Tklo ztwsA$l%k#@GSWpyl)zbAc~X>N``4gTar0zA==bYh_k!-9^1Wdyzpj{VnG|?0ys>Nd3u|7p2YF&^o;k z+teWZP&N#ihfiqgE;D9WdM6rh(^yq-J^lfxN34?=x5R6bznd=0*32L2*Kd!U|2cDm z;saB^q;9?o#pbW_st7xbDW0}0J}!_^L22RL*1xHpO6r&QbyMUApdO8({Fu;?c=S=P zhe`j2mV5vB*6W{p4gL&3ZROBwb*PTMM&B6o3z0^Z&Q8k`NzpnK$5P&_YyJ7nvIIdP z`E}s|T30n)N^f6+BPB$CADFZL=9`rF!P#9pH7JQzqkdY4N?Q6o#0m^bB>kW_qca;W zV0iL&DzVC+;Rgb{&v@&U45IYL(nZ0 zey-QX75z4Uc$-@&zwNV`iQDqP$)LPGSj63aL0|s1h(feajk4ya_gU=fO2!V3{!L(i z%V?pUaOk>f-Ifu45{-Jwzaa*ve~LL;zRr^qVrz} zIt6A`5w*KZspr^TuXB&^%*|nTll_#U$_)egF0OmI6)Yd+ZjWuAP!uTmdf;TcLv_vc zf^q7KuVX(4*AMRxVDCcicO|X9%)GAI-95s`SvPv%;ol9Pkus!zQtJw4F~JB)i(BKW zLKxwEO)dLK;qOxMIundsrAEEZbI*SWMc^goFo83XLV`Z*zD@OfZ%_R5ocx)4dPp!U zlnp8ejrJL;Lyhnyr;bgR8Kya>u1q(q{s=(=)uJ-^$m=fe@vg|7r{VfbIDePHZW#zI z^-UF|6uEHD5Svi2Q=%)APU@ge>mdv=EMxh)Gxk{J!4@tYtyBrf9+kM4l{XO{Ybs%o?POp+wWe~%y$1c z*?-6#6E4~Fic5kgqueG8TIE@p6u}NmIi-HNY#-+ASh+U+x4T_q_N}isQ&Oq;EQA`@ z`QKGrghPSrImq0=xhR6w#E#D(Hx;Cb9pO=+I%-uk-<4zN=3K=3V_OgJC(u<<>1zp) z4NbelT((j3*Ry_?I$AoAy4iirC0Q(B5)`HDs_KU^m5PvJYA!cYc21rAYjw&GA9^Xm 
znz1U)UOcpJ*Sd6=zBKypN+2p6cVh1>EtFg&2pakfiKArjNtLg90{&Y7++}-Tj7YbkFa$cfZz1Ktxv)*c_w)!r1 zTigwD>^W-|n@aeHr+xhZ>4zKLt34uinNE(AL3?(=po!_SebryM8TkL^jK{&DPR z*FciC+SsIQ*qZ2CfE2fz8`wF9#_GIdoGAE9PEMN=4aiTO(-9++!saF!&A#~O+O^|U zK;50Dlq}BEcbdyThVbwWT^O#3;yrv@6PeoUOVg9(>w5jmk1Qkl47aN8JwXdYf~Q9L z^H#y}f4=?i20u3n&g=fmUc5f~=({zoz2xMaL%6-*w8~x2&?#o*DpkLnK0(9xxTrt_R}BD>`# z=R}@a!(W233w`3CRpwm92!-+Bqf$y+@4*87pPar%7ci1(!fY-A7@~3Ee0^7G5phnd zEWRQEpSZbX%<7(PDTTiQ36lvx+Uj4OeIbi+m!2|EA1nQ_?Y}P^H7_{IE7YUA&@Sk^ zbKe}euGwnc$DN8!I{nZ}uSw9wmfG*V{ydlHlKmghSi4E5HOb=jh;xBr|I|$~V9fN4 zSsWge-$S&wX|5Xt`5v;i@s>xXR)T8saY}9dn`%1PnSzjXlO6g#wZ%m~R6m34{igxv z0a#i4p_V8r!f{+)%ON4VaH_ue3TEn1)|RN&jRYIHqi5R96aL!4a!Eo?KHD%cHs3GD z*mHifxad^szsWHn2{jLV^TGl1$nxvIIs3i*I!QtWbT{7Xc!Z1U>4r$SI{jrTNG7$T z=0d)LvDFzepMIKOd8YTz#rW;6=47Cx+FufvFlRo&X{mPcM&A)4&GxGX4i@yfsULkE zyZ$m2%%?A+Q(fQJmxcyV4BVUM%Ag;}`MISEST7)dj>%lxcR)QV`(dH$`|=9K3?}(} z>E!Hgm)RFJH50HW#??`4e-mhO1ap{B+EQLA*xUITUa@;4G(TnF&}v{H83grcs19c# z`p%wgd!E-jjQM|bU3FZP%hwkKMG!>^X^;*HK|p#fkVb)}q@_!`V^u^+1?iTQ&ZWZu zl$MmGRHRF~;hjaj_gU`y`{#bV%k#`LbLPxB-xG5VXF-c$yAyy^MMig=eLDWR@YjGv z@UzEM_1px2Rkq$aH0LF!m+CN3&}(U+GsGOsnI?#zPpe9J?=@6Jj)=XqSy0U_3F+&9 z8(M@f+HVNm6<(a;jwj>Fi`Qd4z74v%X;Tj`V44s&fylz27`TquKQq4pV4BY`*`fRk z(*B|jk>D%+dd(bA6hV%JO{6VrmUNFtFjimUk@-h3{?;38e=l*7XaCkhRI}l@5jjs! 
zS{{rOa~2rwJLSIT&JvCNOQY8ce-$prxyAoa;ckO7*Df@`fH+C~DYJi%e*!70<>fF0 z;16j(fIl@M>jVG7A1v%(qUpI-%h6|4%`Q`%aKSgaPDcvkFB3LALK|zTlN8{ZLDm)B z#d&J8B0r(>#_U-Q?Sv{y%ly<;2*-4>Av`B@ujh*PMe2BJ{y*Ty1h~iqRJ65mp0T6V zkdjGJB4JDeKY!x4loV?wg`~P54STX|JPTobr15Q0Ct~})ysI^+~5kZUL zZb(i;L-7QS4;)dzaw-A1*;uF?wPnccl$)to8{pJmgc5+>pfoZZi7)>Fyp9ol|KzGb(AZyYixzt4*MjL#b4gH*sn1+$E@Ly|ngH>7BwkDG` zBH(20YHNY~u?;p00V35QzU-!c8d{g{H{+;9a+UW^B3Cb|RrUHb;?HTALPZI0EH=OC zPXQly<8n{QbDOm78~>QJzd_eJnBY3Y3XYaK+5=Np)&{&n@8t{$on|#8tSxd2p#u5~ z*^%!3v@=L8~Joa&^b;~GKp+4CjyAFYJQYwvX;gF{~+iKTC#|13lN*tz?fqN+*V+< z1EahzZR{<8tO}R`vg&@bJJgCoFyNSUWKqun3#;6-zM)KWzxEf@XhW<30T|&E5?uHY z+pS$D?7||)Pdc_D;6{*T{i<<`hVv~{^RgkX_lwrnprZXsBY^of^ArcaD5}Dx01mP+9>_sWLcj^^JGNcLW=Cw9GVoDcu;r{# z7lBxQClD9WYjw3J%q#ulc8X7d_umGZ!`RF5kaRACv*6XgL7~zx81=G_2iZ+vX>JS1 zX}^84a}(n4K->pGw+IUnLX-JFR*(W}7!>`{pcwIK44Banf}V&jS?m(iDIE~u7<3Yv zbQz5}<{RsRft8*z%npF9;sEEF#Yvsi`bsYNt>)P2YwtBUX*e^WyU>TTvb{RqwS>@Z zY#sV>_Yo!lx~2uR#g{j;j9pjICUfd`LAb_gnw*rLPaHXEnBa$uR5Q0V>f2+}TqS2#k~zW_#o?5Gx;{=jf} za3HQqUHIZ&OAbT|9#M`G_zC-oq#}seq|k?%<~y>r8$ zkj2a#VTCHaenQOYs4^wn|ZAO5RR5s{>5r^9~g)!oT*kfl&=W$P3KwI8j~DK5?`86R+*f&NjH+0Oj} z%%DC8w8!BzGN0D=m~fE?!c1a6tnKtG0-@8xk-7r>Y=$>U ziP*I0m+}U!od?7Tw=-po(^r2mn?Ec%Cj!6ap{9-fkAYj70Gq&BF;`J3=alx-b5nzS z>qtZuf=zy=Fm(evk3qS*GOzfq+h9B6ye9L5a#xx3>;umJ8_gEe|GEcGAT~&V!mVaa z%Qwxj2{rlvp~lfJKLhj=6cbXqV8(?;;9S}g-R;)ux@wv;!#)f1bHPqsQ0;Af(^1G2i+IFwg_*DDsz zrhZ^#KcI6O1v9?0>UQ$b^aToV4|CAeKM;&93MOc72vi-V3iahgz5i+rEFfEGh@##D z4T*M$lnGHk((BYyebK$HS2QXN7vG%n{Ko=)Er)hEoK?CN@Z{tF<8XKeOb$UuPUbVE zRcO2{Xf2ZL6ZKJRQsYa!6gg*tQLO`nA6_fbpF7(rD%w*~9LN)Rs}Vwm-&@ND2D{DH zh*ye0J5N6@B0|o&IcPtfFywM@FD!1V#L%{~>_k4|lMYj9sh>EFYV>6$%xLxhX$6{6;6Q~Vj3SWR>gJE5o zkuhrT7kjDaZ44pB(b4b*M`J@S?F2%Q7-ThR?-l&4>)WMl07!Jb6(CVG!$5i2=$8Iu zP|aj$!+hitUDuFL^Rrj?{@Eumpnd_61Rm+jxClYgtg&I_+DWDAhO5g;dwOM@$lLZ zoB5zakzl{<9(d^F%7&DEo>4SzUzvy6qo9n?58vgzU)wL<*zdW19Qwg_Y_zlJ7)o@2 z2TsdxC)a1-g*%W7D~?bvYp2_yldao22+(T7ls2|GyHf9ki*+Lz-SiTXzf9`OeV 
zf~{J@lH0H&)Q1PGY;tmCfkyz%98mR2Ul<3Q;Q1^=+>MQ^CncjpIwijRlr213k)J@AGL18#sluw(~ zKUW94@DNuH+~fH(ks0$pT32j6OEy9;aVUPUmT>pbi% zqLHQ}wG40m`iU^=ua*IZk{#Z5$6$$zsM5-!he(IsYsn(JG&PUJzfYiL)&e?Qa^PTw z19dr(8zZc`KLQr1ZXD{jo!``4co8dDKmap%Xt`cwT-uxjHo!&XVEJIEprYy%(HjLNNSS zO~6$*q!DK~{@0X0+TmZIBwYW6CEGZ`GGu_&4B@t4)Xz@qyju3Zam*ujyb7?NpcD&= z_aDC?V1cgdh`PE2e$!rfR9;o>_}(2*){&(fLXMydKqy`1`|=FKW|6`NSsJ3D*Tp)E zRn38?t@+Tw2AJxO(*MX3G<&KLtyF{seO12{r#L^~^I)&WF2~Mj24GIyFa2#ia85_+av85lOAQU^v4Kb*g)6r>2J)V34{a$5I7oEr=Y80p)nT{+0@jt+U1s+Tz|A0dD&cU{UXQV|aQwo~Jh%m$f z(W&k4tE>6>y`IdY&ZBSk>iUi`f-DeI@ZvK9)KK;#VFzxBq|Pz%QILVNQOWa@6HYeB zWeu&;px+C=C>0EEglsVpc{sM_)H@o2kTzC@TpBMBxCR{5JBm@=^Y~5Gy~FvDGoB<9 z`_0QV_C)lTHoa1Pzae0G0RjC_-sEYOl7_mI+^*@gNbh5K{v5t+v7E(fxUTonoj{EM z;SR&T|LRsg(=ZX9G@kI(;Yh+m=daX1zdkf|k#tL6MmLwAbQ|rA{kp%fO7*)={R5V+ zw<_jCf*n^W%mdr%#f)h`Il%8@_Rn~)1H2>^1@z<681?EOaOjuX=%C()o0D&B1qG!i z`e(+9I->+gFskY!9$ljUO18-PX$!NI`6fOns%EljV`7ynew&eV$l>thWRraLGi;0= zB1HYzI79EpYe*Wf+#d!JM{Fe_wq2;G7eoj#l(_Wk6DYRsx$7reXofB5bo2hwb{v`7wVs!44>QTiZkS=f#f; zO2nCVxPB3PIhC6hgi$L0X_T*|t+#8iXs)#Og9kMrRkktklBu!r!dU$nZxBc{3!xUa z|JH!OT5V}}KL+(Yk-1&+2PoTG|H*GRj9>{(q~@AC2rghl4_L1qbpYmw4rS zDC1rsKee^-u@0LuAs>yHQ4nFwQu!p-V`~8zz6Ej5HCDy=3iBQo(*jN}y(gy#t0p3~ zNjyrF^7QHtKu2)o`2;^uv>Kh}+CkZZ`Ndt_pk7t}%kRQtCPK@NjpNG4tPhqYQl(us zLz6CGb|JySMrdbXD>{57!UoNuRm(ve=;{rWC98b&qj$KMG_a8Aa#7ykI(~;}Q`1gr z%5n%#8OY@dE@3vt=$1hL04L}vPKoJxeu+`q!dFR4F=Bo3^$qzc7~6;UG|*Qvl9-$E z+@>8zzM%WsG&iVVJl*v!RxU}xD@1rQ8jL;j2&9F5@IQb46V+QeT2Q1MX+898cbGyF zgHZZs$d{K*%54%sr}dT7NVjoozzwu*Lf+oAvKYjGk)J6n;lc?Nc=(A(wA*Avw|ZCv zWFy*pd)KYV!>LtZTX}{jEif7X*F|1KBQc$-=(le>45;|W_)Kf2b5k%GrQ(5af_1(| z@{b=s;^X5jp1mOn>x;KeRsm%k3LH9Xg=`gvtIIp094dS8JeqNM^MpvY{WF#UpSPs&BPI*_aluU%0B{a)Z#g-uINaSX6>TbtOdkVIYU zYM3U2Ht%3ipl>wFVDdm(3E>y=A7p&bt-vmMCf?aB3~I^fMnFlwY1-!E#3Wxrzq{ z1h6P2T3K6C3EQX9PEArPgL)Z5pchcJ%M!@v(^v=p8+XmT`!Oy-d^_7QS-`E&#Dtaw zXU=Tg4%+iHYDt~+JXi`q zMZQZZ_|ev{@1P>DlfbR^_I6P5*`=quyEg+Ajs!(!EJv!G1Qu?2w0Q0-D4D>`nj0`0 
z&(UAOMOt0m*rxlQ!5*ENU@^=qcxCE+GE`zFU%&;$TMDY37Eo#iBwiHHzFZeFHPs%! zu5KR%8ZM52IydmN1&|h-C()fTr=p@#P`y|M>SVmlWt#%6Y&mCMb3kbaqaVfL!PB8xbDQzk0;q`hV(?%a?xn+~xgw>qQvW zWSuO-F-Yo)J7+VF1du;rim~fiuZp_}^nJXCcSQ5_}&eImE zrofmejGh5E=qtHWLE>a2S5w{o22SuLT|xem;4xi;0c9A5j=jkST%JYCr1wfvEkQc8 zOv3{4#fynv8-vl&;i#&D|;Kl&CiCOvD?tShVr5-K1U2r>1lI%g8aUagVbQw8Uz!wH(M- z@YD+?Q@R|SmZN9i!aDeQb7J-$-*qT2!IJMCX(AU}C3xgL4YQH1Pn|-nN&*Fx*70`k zsb>*!nCywbPJ4BhKZQBjF$EtX7vQWEzZp8{T!>)FH;#@k?TEsMLJy(hV)0E$eVew@?2=udW6=je!afL{TbK-L zb{$A1LQ}cUW2+8E!#da3N_@hf-T24-qS`5VL@bYqd&0|L)t6Ogjy2*+{_;l{x~cz9 zVMz1XXnh*w~oIt!hL;m({I#Bb^3_=?;%22DnR2_^mj!zmdA8ge(_in0VOJ>6j z`bhQ%2IB!3%x8H!b=?%L{kG(yOi^faRM|dgVRg$f|TkrpamvH0&xeJ+Q zPGfF$0~eY-Gu4&&g+B|^qX{-J=Gd={iT!7J0KpEPtw9bP8jpUHMJOi74k7JA+JG$C zj>>F1>DD2O2_Ru!IC#iD=}=3k8UsB3fa}O^NT~&O16%Ns-Eh&i#o;ubkndk~u zN4|hT7%ZIkx()IQ z%bl0K-u{P?(g7oBJ^n(C!Co->J#bW?e&Cbo&?7oV4;Y`Pw?Z#|q5JBiW8C15cb#8`lA;xy#r5eRIp~z>-N8F|mQef-81K->;>O1? z)t;M~`I7r?82UnT2cXofe6t^vd|>AcxWR7l>cO6c{<|B#RN$aj8jzBoJVsSKoKH*? 
z*p4=k_HeL|XNar7&gSIgkjZ^#0FHqN@Bq7GClE&uF!4K~bZH2c-pB*#`I_r`&#`WO zctm$)Hq@_t{uRXzg(D}h1&DwP=_FGuRGtB1-Ol%o2{Tmc_@rS%`>EtSJLaE3&y}yv zw;3ogf5p39?0bREwyAm7G&*sSM+GWHuy8_{V1)~JZ^mP~Wr&_TVuj0xy{h}zcK%-; zL_8w~F4;%OHH;&NM^D6pEhDo5tBqlDnJ+y)sXnxv8uc$ZM*dekMA0(#XJVKbeZIs% zEz@@wff%_WvvUI??7$T8j1YkFGq~`_V|X1QlqE7ltJL7R_7M!j84G8a{aYJTOoRf9 zj7$E-RLG(m`~X)zs9-ybDZF9HMYx}s2EYCohOYST1uz?S&d*}UMxR9sl$~um0SH6f z;{XgHdnwWUm_Wh7e<{PK{L~0K>+8kfgfmio!Ty)J=zq~uqPd+gr;QqNj_|6$v&rGz zP$~gx!Ya_?K2Q&_gM5!j1T6+iraF$1+XkGBv?2J@We9J4a|h4n>oJ_d1o`w`8rY-q zVK2D$T(raf!!I0w1>iCGPRGXo!lBQ?vXcqQO6p}mWGrA=RTiN9`ABDv5%3M1{_5w% zs#g)#i(k#@?qgak9B6@Qc9Rf7T=at1@PQunHx7I7lB|}j4MR9u(1Wj>x_j*y*F0xI za}76;3fJ<)9UzA^$kg>4h*v30!SLUT>Hkxg6B{0yHcbjz|r!8Ytn+hLfXU{*Nz zI3{87N8V3Ot7{0*1^UlwX{pMq%d|E1_E;6PC2>rkG8wCMB&P2-W3 z7Q*Dmy1e^Z0>+$)P1mP~e6T{(S3fRJ-M zZDJpAXfq?XZ!8OW9uT$?ykURl03!I&FZecm$Ck1WGbe{9yh87j*q)d)#(Td0`cypF z(49jwS2o#Q*|ns&q{-s~&=W9%>t)EXIFkL!>1Y@wDa|5NkIM`liw*F9lN_co0Xu=< zz{Td5;Z`gtW@m+NH$4YhWlj|Hn?uI{&&Lt~HOqbZE8O9_7DRs#k%HO0F^q{}I>!15 zKsITbif}`^>jGH;#{R&;4^=eaPQ(r$uXwzha+5-7yo531RhWjWQf3)*U&iv z;yk_rxQ6~oHx1;guNVP0+V_XW93#!OmnUMFsPH0fAQB41RvuelD10t|j4sIrXpvf2 zw-$1=b4UMyT}R`&sXn85j9oi{9n1nIA(c8Q1^NAdaSagzP{_~l5{DqP1OEKS);4;V zn~t5IZ>P}8`xvs|29%h5f5+f1vy;rP}A`J zeehKcZmK5`?2J~6c)>8kH-~i(!`8H+M#298XeSUTd^D&dIJBhwlw*Cq6uJfk9$;uP z1A-REV{@ZaZitcT_K0)jlMzf7)Pd?4GQgzo4P(fUF)5G{=USO!zQ-C|C4#a$UNAVw zT@?=M&>sPx@{*#VX%ipV$r%woE686TGXvh!2wQ`g0chRkA~IjP4o8ll02mKuJ7!>;qw%--n$aa7cgK3pnKA+kRluSsErR$HWiOZSd@= z3*q??a^blE&ih{XIOv!kgq{S<_pG1DCgjxqFZ9u{fNhc!xj_^hWITxw+kJ@^KpED))FL=b8$Mv2j&(9O+=I!3&X{R3xXOZK!LCm(#Z>@m zQ-j6Az7j;Ue{Sl~G49a_`z}1bR+w~=7^;d9C^UMjrBFW-0=-HNR66`;r9x(ni@ zqGr&AT@Y9>T=kx7<(19LiYt)%Uj95uf;dPYtJX8`t~T-htpXbwElD>6>0jCW0$?0i z{eo8vQ}Fxud_lP8i6A5g)%OT9G%H(kc6b8O1Y9bvMppQRZely!Mc$O7#Xm0A{fe{I zzdd{tDKT83i0!`M{=te?&#}Gd*tCj3F~jdm8Mhwaqu> zUh5ub4+tG|?1*Vam7?w|!_x8$zRc8Kz+E#O)l&X-D-M3TLxP`2LZWkA_@V1-<=n63 z5h_$7>jf~zbN<>#y82qSZ-h*@Pb>4x0Uw5XdwvdtL+&(-@XG;e)0bk`HJ3z?H${H` 
zKw=UY8oV%4WMmOYW>>UFfxqzn(dIWi64FaklM{JERce>Z&zYVTU0dSudU2mrzb8`A zHjC>SYdNlTtSn}S2&l}w48grvcb-&^>*>bX(_1BMK-HICuy7@sRx0!h7aXYxu)8J={Mi0JR z$?{fteDO0aBU!^rj6j zR&C+>3VhRF#HEct=n1V7LMS|KvvEm*;qxT&RCp#D!qfjmAL{LkyeA-heG?rdEWde)8<9n_O@A}c);3drQZ1C_yS~ydr}GMV zi?-wwZvbKH$MN>GmJZ~ph)lGVPQQkp>^>zg6c615h@9);+A!MF{QMNw5>ikvI;Gd> z_6N@xM(jx6O>^sK2G*9Vbz2egeLF5#^yqaW@JN4wcHVb?DcSr-H!`3@`)LKdt7zx* zP3~_VCUG*kTt5lkouIr2V^>+%F$B~87GoWJw=)ktRM(m4x9kUT;WFK}Hy}0`;uAW^ zzirue7QI7V_*+Qi_G7{nn$s@QAZ{fqPP;qQ%RjW1$Cc=L#KX6dFvBo$N#O4sF6xm; z!!X%jp`mo~K-3`mNt+AwUEd-j^r?dxlJ9D32DMX}SiaV_KaxubWdgKs&XC}c4dC1I zWQgqodkSP<;4fSgcQj*ZrWkIfl~sjy6({8I5StRVbp30qu*YpcMEY z?hqnov>p!gGl~7XBrw2rfOLfgj=9S5=#_cUx-~E~5Gp}$0exvuVRn`RbCn?0@i5GB zp_`W%LJ>+Su)+@&-JHjWsEW3JIieC|{zWFY%g}04*P8|6S2|LYj%b~o@cJBN4v$KJ zA`&V%XM`CsA;OcyVif4RcGhxON-?DJD*xVO!*j@Mu;eDNsTT5gZb7Daw0#zAFJDjF zUYBe?W&i`%VV{Zy+#UVi2Lf4zSUieSMXx!8#zJ_~TLi15mMU&T9W zuzV)vyd}=Ch_OaOW}+cwnjYIe?8e4M(2_yB#?=lE??2ezCHydrDx3TCit+S0YK@sY zmi@VI?vNGxGM9fA-E`bV5RjM|p?y zQg^xnXtpDuQY$f2Y(Ce2&U%Q2iAl95NaZ)r;@pG9FcFG=l>&xO3r0+Y8;v3!;|ja} zN?t@2=+%SnIF%}yien%eFHPr_CiF0hTW1F}?@4s(*W8GX(}l9~NmnG`*#n+H0+~C$pN# zf{Iu0_py0@ry)+pKF1(Bg7_CdVEDm>f-_OZ?&sk9Org(J51N2P>EkZRWVn7F$;<9_N zOc20*%+KPE7Hbc#BuBSgkj`}dd|~Km_Kk3slsy&g`&uZx>s5{6oszb9P~6m44To)g zKv^!ogS#XAc0W*rR2IwySwHJ9UFl**rCi@=;mp$*DPF3t_uO;6qq~s5#=6pRny!?= z+93OdOHICA8`eSK?qXy@OIEzNG!RIcUX`?y;S2)(WDg#+UMs z5XlPk<�_$#pLk}7z-pI`H?HT5jfshg zi_3{*lnUxfMdixfS5e`q`1V-XVRC+`T!|1A!XoV=MW=1Sn9i_`kDa!yF=!lGGr;xQ zf?bX*h`ds1vu3tn!pX)~$U>#9iPOk@ZbO>wra+?a{71s8XZM$5%k>fPsdP}u>uW)h zjU8wp?yz88o(Eb)ZJ(E1n;9&b6bd%){y96gH~D_G!!jn;LT7#A&7FFm>6F^t4KBUP z?$OuCZ*v9fd|fnp9y>F2UOQRo-imIsZ)bRFlD8iCJ1@+OCc4fSv7V>sE_YedW|c4G zJc$p7Id<1>W~b{~T~v8_pYh%y7MJjE_?3(VLOmBV`hh166hOX)Ky6u6csMcd>$SDw zQ1L|9A1^voP(L#C<1Ek>5Kj>z+mj^6+{UybJZwkfD_m^zxKpkIA9$LtbG$jRjU6## z{T||V8KDF_(M38s+warkcwDhU>Y5&&YREaYRGWqKoLRX#$~pacoLubpPBo5-yK6+v zWg@j_Wb|kGnS}@QRl5{D;$9aH%syBTpq-!-c^&^cjW>}&DqziV=4+8db4WjPi+qLG 
zlwzns8Vf6Ecf?08lgeFYlMo&4>R6!L9w*SFMj7Yu!_Uu88)>UvzO!G@y6(!)$&_)6 zP{~+LQl4McnFK?`tbG)y|}(q~>+_H-zbOOh!iR3r#Pa(3q(` zM@qtWEnOu+)Uj|eJ%v_M-*+0DMR#VRLpE>5mZw%#U?W@TTJa&Ic%gkH;=iF^om#Aig#ngimZ=iLG&Bd;etR?lls_lba(yrpB^_*9XOLfaq zbLDeYZEZQA7M=D~$D!+i+wSJV>D5W`jbR5=kibE#rQ?g6Jj5A_I%^B%0X7kB`82-(UYC z9pSz_lrFdA`VA`~c7LT6e`(OVG*E8c;@6C}t?&mU=O?iwb?u)jCVq|R1X$JX^r|@X zuB7@LYR7$e7XRq|y1K3=seZap_e&C675dc$pMAOWoy7A8KoBd*sf-^;)9K7G3lzML zUf=8pdCIP_HF91u%l}3*t>tFwuTh>((WRLv-LSriOL|s3`F+VOFQSYFn@uc#d&Kdw zVFovE_N+YoWG!fN(TX6OI{^YCzTtyol%jWPf!1|rCNs}2-hK8h{KgVjzFxhqv2~F0 z?AE>&u^C~+UD3Y9$E1B$BKh8LG$^XFtooTT18P6FAen$?3{ z4m|(}P|hZ5w9A)pZPp}cyN=D@2zH+{^1Kk&z;R+`g zURY5Q$0gTBXRv-AH@9&wrRb?H$@2^}6&SF`M}u7Y_c-#YFHBp>EblJ}SU(<=`nzq+WFBrWR_75# zz2Sr3(4MfVKG^Z4b##7aTdx(Rm@v9CG)7XTauTkLS_QNScI-5-u|MxMVZfVQQ%Olm! zW%=S&N=c__y&T(j*6}MfQ_FV67?MZ2In3^t2T_##8nP`5<}un{nX{?aS$D5PE!oyp zCKfR>_ti|L`1i)C?$SBEb`sq^ulN3YiZpY8>XY}Vr9E(6MEX*|$gG|wXW8@@fm06$;6VLj=Qu;g6_00W4msn#<>^ zJ1=~WPGax=n2JU2i)pR_kUt0)VaNTtmafw7b)GxBlrRFVhSO z(ACTJGd1CU_+sqmZ;v^#_L)Ye6@ z*)NVe&dwS_4H1$DZi%S!OPEp~(Nc2naV{r5bxt+=yN~x)3MCt}(Z0{r+&v4u%*;cV zWkW*!){TY!UhJDHPA48D#kjTSm1hevcT#VcB+X#Cq9#~^wHNF4I^R&azdzF-Y_ZPI zFS;8$WD_3BVC~+Z8`qx~>RM@SCOs13`l1ik0ssEkp`&sJR5-03KX+$qF^O*?T(cyE z{Cm!KF0-R>7|{f_iA8|HlB3e5w}?Lvgi1uunb0Z2CISu)4hwgO+J(j*TuSqusU`a8JiKY*JYFO{7YZ`cJR5Wj--OG z(X@z+hEUo7!L5q%c|j*m&q}i|nyFIk&(^-zxK-|MFtf72QA*ZW0;z*_kG=_;#@mfd ze{ruE2@hux-^rBJOgQ}h-Jg>W2u9QNH!e{@6sdRPoUy#cEF*Uu6U&e;IZZna0bU#q zJuDI9>zWZ2*Q<+>xDz>^^N7=q5#9Gr&2Q&etN)22G?4LLyx6v)^h+a#X+Z_!+6Fzi z{(|(=zd*_n$7dX{PWHXRZv52wd@((r<`5cz>$NW`ZAL_<*aiypY->BS60AGiTaIiq z0g^7G?s%@ifWQR*z9@fCm;3$Z*kS9(7jTZd){Z^g+*8b07wD(hBc^=T`^HS$?|;u< z9`89=FPNh9kneONFmssgN-R@5vGks+thBA)?+>B~ zU#jN$R_7$z3(BWePx5q#4~%L%_ZJ$d07jzW&`pol%yWl7zo$v<^!Y_I&E~9%5_=c< zqsm0rOxf*+yDPjyrlfCFGC_3_UZ(SA;)>MD%!|AT>Ph0Uju8WYxiKlb89R83Q zrV3HpL}(Yn%|cgny=hSP+sVk~`kl!cL#VG-PZKl6A#K!lYj-zA3q-2ad8P~qTX*<3KL%0so7Bym z7yl_gZoc_4{Ccwm4OfbY?f(p4(0R5os6}eOF@MjaQM7tXV*P%O 
zrTOm+5||n`2oognksS<(DOaCQ{W^e|2yhAH0a(u8`O@P;VpX!3T~2a|ORKbSZ%*=X zyK`ltbpeZ+nVFD+@3ud1Yixrqmbp8FC8n09aV_AIitL8h+Nh1vQAG0FbL{>OePcY; zAcgv;QJ$M4YC>bVc!L*sR8N*2t_AXtXsO)ruWG4e&bQ1|Y=LF%oX$|tF0n~K}sc0G$>Lt1)di8z2^~}*~Y*eE6L)$a( zL*3!>sE4yLApy!1*|aJB&{Q#m#;~+JwXZPT*CfD7^Ze&HKCh~cPEFJEG$Ie%<>K1H zJn4R_2Zxzje034~<(qJtFPW2udw=Zk^~8PLh465h{tShU^4eR9ClA{FE@ig^tW&R7 z)-wCBzk10ARea~ky=*0LZr^&C7o@ydLu1d#n!e@#Dw2f_jN>T4*a7VMbI)tEIUbbE_GfJ%Re@XmArKVL3e0_n&X+T5Y0kXq9px7Bp* z#)CJQv2fF2s==pt4cE*#)zkXE`3qRSp0xY1r@scM6^UZ~#J7$+Ym6eZVBv|9KYX;|iQB1n_)+Lm-qjkYvjl}un$n=WdzU<63 z($Ce?v6dt(RuQoxmL1dtrofKXs_y9x6<8Um^D^3)!17zKvwocI3I6CM+8I*2(lS?2 z)sii2Jl&O);Eb9wH`kK(n<&*&W*2gr-4q&0V@4Kq(>)MZRcfD)jr07>&wTi$Ee3TX z%N@YbiWS+sR&&;bG`3tXl{`O(xs;KUfBXlt+{x#YUmu>?0O?>LL42h8hb&a0$qUU4Y&lC%4(I z%xtvg$Ll=#pgb+4W1_{(ZmmVRdYcS_xlt=ukfUaKz9dw9EBRak1;NSuV$nU|4dPfr z)q1=e(Bq6wtLPX@@+?;BOoX4jGlEU~bYr;pZ9n5+Y~R?0(8C2?cp%-&m*x?a211g1@;=BP+hfgb15*dVdS zOC+Tp&SWIxrRUe9RC_NS?oHv-)op%e1y@(zRWBX%ZBGqZ=iW@;-7t)$-WZcSqY!72 zIc~XBV{`}g6piE9RLo%NNALv*R3&tn&*TbYIJgj62~8>5#3SF}&OwF9T$)vx-D&CQ z=r60CT{DYBjb5`D;tWrda4#@DOv}hXm#ANK;nnl$0e9RE?D%ft5Sf`_?H94#Ta@^4 z-OLppKiR(IveSSd^y*>UC{TX65jmq2{o_`Lf{}EMG9k&xV7X8IV{zHLS-+GTJC(8= z#B1f-UrT&Bw}QJ?bJ&12q7%Vh_p|}Emb;wAIj6iX;;}#C;!|$eNT{x7?Kc-M&c@=h zy|qlL#BMxs2+tEi>ec6oI-5yHNxkv^z|LlTIzIPB4yVE}WhJ()7UOQ0Ta?J_GE$}8 zA;**-&Iuxpcnx3ZiuH88oRVn8Nj7r0Jk*|ip(!&^)N3gdT!|Z06KlztsP$`qGkf@8tUxO^F*0MYWsQm81n6BrD7r-Je@aEp|6S$(Gl$VpeT;J18 zG9)@z+VhyyyeBxj-bjt{OxA!sPVQaU$v+-HyjYO<@7kik}y!>aNWx)>gUbt-U)995TyICwDfy#w({d`m0k| zE?+iS9I~x8U<&%3wu2zTCqHv0gLYgeQKmaQaS;F)yn!d$|HfD7iI{-H;l|^%Klgqg zGbLB_2KsEv2_&ddn&)JQXa2Dz8t$M2IbRHUNgxvJG2=I%5V=P?sh1!>{(d3rW`k8< zw}g>#*s=6cSTsNY1U-I6U;pKrqkny8)&JNW*WuXJ!u0u1X#DpRPtjRp$8w90VnA?g z6WL;)8ack>QMyEQHjBduIW(0Mj0HZ;#5{PCH9Sq>1|+Qs@e7^HQ)k)y>G%Hpvls7` z=y}6`J}&(gcajk!T?bqW5IlYA&hJIP*Go=I`w1>j;^N|dmn1bgLUq2dkND`6E$e$9 zJHlT;ks=M4+Z&^V7-qDi0M?7=$?LLc4qAfuGe`Yg9ML|M;dCswZa#NsX?9F4T|8?Q zxNz0nTk_jFJN4GbLpk9E{hJ#Jq6_Zm7zCig(;}eqAHtE9Pb421)I^^}hpYcA00CN) 
z{r)F?7|z)bSyPTA`a?0{tNYn%fpz=Ni+kI^$=RupuoHgc$?RaB*LHqqm3Sowtj0Q6(F&oI}znZ8h|D|l`4{aa!wJ#uCB+B)}@X0T_v$JlK^Mm}l9tv23 zrc)sD8Q4(7Q29R^Gn#NYUQ2o||t?B-H0q{oW7 z84!akbv;W8p}_~-yNqrXr~jPr+qJNcL~%Z|o@_P4K(cp@pET;k=)2(xhddqe=F+H; z*}l1T(7R&WrM?7}6K-i!ZGDJ1+4rD>{WnpEL4vtVc>&QlB*K`Sfqi2u*p>!GnYD?=^22q!`|0Jxkt|uELu63iZ$il@|0ofS>(ADXWmnfjTgw z*vu&DB&WX`EnsC48%qbhc<~}MRMKg>>)ZlLShQ%7wkJ)doLfira3p|qj*lPQ!tG6= zWMyU%p;A}jW@FP>8F8U1F205v3?D$sE98A0rB%CUXV+NL5o;~#Ox@Nw@Ib_6emb|H z^h&Mdh+C!;2ft`_x1e)+BB^QAQqA{M?Y$z@;CAw%>yr2#l;d>(R%`Zt1jY$inw0;^ z7=XE+0fFS2{=hH4CFpNE3!QM4{V?k&Q|?hhL-Sk-i&sLCpi7dJvuf*W$t1C@VcOAe zDhhS4?SC!4AdYD28r+J!YLs8~^CB)n_3c)BgwV)CX5kvky{%U6s!uj=M^sa7D+~q} zM*^ODU317F-5M-4ZnbR+&WR?u=aH9RKg%Hs&(!Ee1J;fR?By+^5DJe09p`cm4Wav8 z({C^7Ird9}f`;t3{80j{WLKg$=1Zz}2|^{k$9{gsw;Rkv3tmp=>u^XbSEqapwOkIQ)SYij+jQe`*nQnZJfABMg1Tj|orls5gqdq{aWp zxd%-8v5A%v`<=K;UK{Pm1Yv*twf*&e8phH6pH-cEyGhv;i{O&3kGOL#skw^PB*5Y= z7JF@SJcCNODtHQ&_<1|^dkeN3r>^_GPTbxd16uj5G>d#;xdwZH$sH7MLOXnaegT9j zOPv;g3t-2foiZmk2^A9ARr(emIQW*ZUJ0trN{#y*?)u)1n`ISSd5*2fp`j-`oAr3} zhH?2QC6T$xKG)`0{cJN`Yb<;@6u76+CIRQZ?ZD&n-uoI}Kk@bxDr#& zWR1SEzFy8nf!V#)GGW_!HFeGElR4%Oa#XagSk^kK`6I0(mb=|5`K>1tYdsP&|D=f! zIRht~X7A$jt}ZS4@xVNE2`q3?rtpk z0Fz9iST9Ke_1L5xe*RANa;&8){I)efg79>2UHf;qri4@(j|CC?+eOW5g{8nYsb(Kd zW%U=nCF?Fb7(Adml;LE#i%Rv_M7Bndv5a^uIi*U^Q5ebc3Gel)k_RbF(w_>l{5-HdpYl)cS z=T}Q@B~>m}B$QaJ0G48GOHyXKroo*lW`Qf`MQ4~MhNyuH#u?Q{sjK0|FMguaCOP~aCf{Gbfp8?9;@m!x~O8~R?s|6Md8VZ zIESMg9wKV2#_oWeQKVCHmYJNcu7Jz^(L0<(>B^A|8u+3!q8_W0PgimkK*Gqz1U{vC zTx{8l_4o3z#ML>yrO)ksgUz&_U6m;Zrtu0s%G&jPxud6lo<${V^<4r)gxK zQBjERkAoVqq3j=@-u}+X0$$LzkGIER+pPW)2~!lB$P1F#8BI_}>2q|2gabb$pB;O3 zRQ>F)uyvE9eY2zwO7=(l*rkl!j5P1N#*(q)s_nI?bG0|^MDzihS#@;Zf}iA}TjN2! zc9CIQf(vS$mDc5fd^n4HL!1t=@}tEhhs5mc(ZP&Z`(31Uj<9yP3Z=6`tbhrXf~@Fa zZZp#QQ%iT#$6k`dRjYUz7HyGPDu<`T1V?xXp^xv%FJn_Ka?RepEjUIhbn)GMQr%y$ zh<+p)0WyHxNN%>awe{Bdk5s$J0fuUqz4X>fA8Unzf=NI1}te%!1h zEG#tYlH|1T#AVED!#tCn-55B+QoT2;184*v1V7tRc(!Toyu%4sh?Mg<0r06sdP1q? 
zupS#byWOdwYsQwWNdorbbshUE`cHpzlq!!t$hEY!0aro5^$T$dQwa2Y6a%n&CkTHe zy3ewE=zyf6ov%irgZ`uhKVRnlzK_(EjD1xhXh8a`i=gW9KEz`0~yQxks8 zrrT>X`1-1RPTi;Fd7)}!A_CN7vX>t+ai)Q@9` ziaJ<{^6^+#Bh-F}Tv9Js?0y?A;J1JJ+@9@(Y)<^wH+PZ}#q;?7xcTrbj}n=A)*pBo zYbg1@gPL>J*!iv0eOm-wpd{6QIJPoE$R5Ya-m>T6d!5R-Kfm8U_rv|Dyx;HZdX4An`C9T; z2kL513&nUl`y)QR+J&{OG+bA1X+nR z?q2)%K>kKKmmz)qQ$ts*RvTH&vdB0zk~najW!hL%V?MRDUjW0H(>g&$87%8;cb~qU zaORI3Ut#0^WYp^>pu1MzM7u>!MjCAxLDu3iRP4CnGLX4qEYOE7TXWo76n34MO(kV1 zv+24idUiq2{U9YvQ(QM!Z=&C5YpMN#5otSWhrmPWo(E;z%k5{99{`9IoIKcDj^~jK z^Kjb!Ssd>%Jn;O5B||6cq+!=b{;#69^}VBJ%>dtOyHiJgK@^%?rJxUznBEyVmA}f@ z1_~`I7WS4cR|8;<J(ywWHlB|wa%mIJ z_}tycjzOi{b$bShhUdAW+_LcSo@n?C2|Kz^*O4!l4D?WoI4lgjSYET$d6JhIC;maC zcc+#wBjCBqKv_z@l+Ez3pRinoGbqh^|NeZpvN2oXjgh*y8}O~PGv)gep?-qLBhf&! z322zv!{c#3iQSFK=mL^FHY|I5Ff8&V>m70!l|L(YXV*v}(3qR&URKR zP<7vYkQiq0r14@Kwb-CV;^#K%@%oMVPHDb}5<4r?@fklpynEPx1k1y?B!E1;SZ?iUP8f&x934uC6uZ$G-M}No>LU+I~U9RHOuRTHuKsMfA zq-YJQ=zS=!5c=x19UEsqv58S7pzk#UlCGg;-@ZYwVPVD%{Y>BI+u>sek{)@ydT+ zejC)AT^mVnqoZ0H7_4yV&D70l7@u51r?NUOcQC2tUb#JWZp0(UqWEwtBRnNF*#yZW zipR|_AZnd*ZG5HV<;0bb_u_wlZ4q%_VWXhZrty=t!GeGcjAolzFG1ERFZKq8*qHA% zo6|0H=yuWFH7zWDz8_B;1l##Kl~qg^!KE9ga_}bVYwHgFdU*_}h1gm|7mu<289eso zKJ@Wz5pG!9nfV-hQ0s>yEc;Nt%Mtn_7k`75++JlRg>Sj4<96@<>bt@&-f4FpV{uKVLQFXcW09`?(D?5DmC^l3xN6@z~^Uv-+#Dh{MD`TD!S<{v~o`As%Xq(wS)Ux0_> z(os8D0L#0HK3Z|{td*ta*#F=c8k_}l_OHs%IoZQf1HtnV8c1IkZ2>=bosIcB z``~jC^e{1yatl2&7YTQ6(#YbPI%=i>b3%A;H}(}*nU~MXiQYwWxb_4FpRfoLpu`-R z-`%$5Hk$zIKS5Ua<((?#4o{9HdMwXJ)o z4?Rp6@%l?Y|8-j>U`o)fwWt1BFPg)JXZrH=t$b&%oKRSQz*`iy-w)gMtk7?Ksv8oD z3m{wAPj(Z7)JL&)xnQtYtcatxm2AoXQlFsWB{a!n>3_M^5*@&Hd-zHP(F}c(yd+w_ z$mOxqe1Mr$U&T!S&-?<=*qe*(BKLka^c9RfB2N`qz|5SY8_z)II6$fJ{%c?sDY*5j zi$~lwm6U!>gUl{4_JAf@iQfz(@tYqiD=xagIZE=!|DMuz$URWMy%py1kDY9((ab2_ zx-9vrKc)Mj2gI()SfSH(GECLR);3H3T=&%XNM+VC92>COIBlH-TiSCHWq_Z$l#eUI zolR{{j}^MY1*L5C7MQ~SfBuOYVA%LlKArB&&nDC}((LD<@V4$&QH6u0tNcje&`+N} z0nsmgKC5;2jp)u!^D6$!hw|h>d1MdfEu5VBSzQe7b8sg9cW#W55w>$8GVszV;WxHJ z%;Crdhj~k^H+GB3x{_Z&55I^ 
zMiYzg>vt?I3eD!{RGwL#`k<%SsYZAd>GO=g9`3HL%4o)BJI+h4DU1iwuXQ^W2L@T6 zBBozK4K8Bi1HQ|h(Ok8^FPQ4V1?e7=W<&i}#Wh#q-fvAkc`q^c_199RC+K+z!HVi@ zmqyU)115Oces>!8s?rSRU-ln%VjrBY+des?Ps(5#;(>izQ_}$0VlOZ6UzL78Eh#BU zv)BUIxg0BRyc}pyc;KM`g*7AbTTzgdDpC2*Ad!-%U@XgR8{6jd5tUuZiJ6$(@1*P% z;7ZvdgExP5SBhsJG1I^Sg9a-gXFb(-T2ysjGW8yK@ThRLygGBX-v65N#?EIk+db9? zx(bIQm0!NRdHQ1tc|lIjh5t6aje|TM;yk&YnXxCa?ODf*GdYs|w3l`Emo%>Zo_+H= z;8pNDz505}x;jd0h(X95f{B+MWX@udW)l-n3(n)!T|2w0+WG1)(feDFOO$|eULt4m z_vtE7nakJA-dmeCiO*@it3?`mqx`zLc;Z$Jdu;kKtnz;1*is)7~I`+j|Gm82gfW|g0O42P!x&_Gkr5-=wLOC-n4YO9x98#GH_T{Abc)`9 zR~%Fv(rh1Sewxb=`qNAdO~6(?|wTbIA2WNh*GQ{2FMP1L%Fd zA}~|0uC5;MwyBOQRn76>#*bT^*=}zQ&p5iL*B1B)$e&dblmG{6`JtuwW2$e%Ee%xq z1J}-fKYjAIpLbY98!-)L>^^CwBjm?%zCF*BMUK=NUX0?$JlmLa>LfOcvi2q#9~v=a z>ws-8-|zeGi}>vrOQe92UCd#+=FlE{PSm<3X64iECFb!V4;BBySt2{bd04XBu9L96 zjYT_eG~bpY3vcs(`l+BFz>v_aluO;Iit!T@6QDNsg`r{3I^}>@)j!ebFCJGS5tktH z1-@Jp34IUt|Mgl_q(xTbj{4jhma?wt;C`)l>1GGg<|nT z+7{Vz_bEmrba5)MXqp`#x(*dHz zFAt&7=qF?2TV!Oy(pfCU!Ll6}?_WdAohzquCb8tcytwMi&L+IR6Ct|0)x11tt!OR# zOxAj%*M9!6N3yPM9ZY%tfQKT={7R@icMRCjgjuDD4 zmfQt(vHGdc&4(`(sE{#swpu7Y`P82hKt!t>emkE>*JwL8-AemK@8eTw0kk~gB6n2j zSy({KI&?6;`o!Ga+}e8Oi`S(MWqWID=dzNh!)PVd`{Ns^Jq{kP6Ynf*MqFyzPu3MD{7jAHWt^{q6DuT?G#CqxZY1r9qdmq7|BNc$ z%w*PbR;5p2@`pG`P->)!^EjLa>_MM>kpmQ=iV8&QO_Tacz`j78aBqehSXlHcfgWb+ z-?nHOhV)Mkqjct#LdtI>B~j+x9Y+j@=+Orhu-D(#Y9>Le@}&@&=_B#JbGg1C?zQ zQn-IIjR_ymX@$u?3FcNDxZlQN%PtnuV#7_W`5}#?NIeVYvG?AnLokWp`A+3}AMy;Q zxoQ9GA34HaB*F^xFi+_+RS+$RfBw{!r{53{ieFS>l7ye1>l^$%V1iPi+SxqX#MCU8 z)a31o6NwW$k3W59myQ6aiu1Oe(5(&NH?(*Sy#mQBCfZQ0;jc&^zv~T}pq-*C!9P$= zUao6HA}Q56eitw%JUkNtW=t6RyKafLh4{1aN~I)y$kKqt-_CE6t)C+>+3DqcK?Yc~ z?3z>D%9nc};nB}#GF}J9$wq{QsVMk;bI0sb+kU_OGdI)!_BWh?!5%PhH0SfTey(VA z4CQ;G&pOq687em}j8xJtO9=q$ag*dHD`!w)e+_bO@8>O?C6e5K;gwF&X@`YR{1y5c zRDMYfkmyVy;7eNkQrKZ%uOpx`ys}Yioq%A{EGm7%3x+>a>B}i}tIzA-4C5EM|DrR6 zW-+p$(t$>3@;j09_?=@w0@z8=yxp=a+663n%IjAT669>g=3+zx@q*$8`BO{QpG#ie zzW)z`5*N%9sNB+R+WaSS_>}}Oh$@Wtg9S&8&hW_%8 
zctVv;qi^STNODKtSvjm@?1QBdodT+yATvHSR`<-<;_A%1_a(O-E${gvN4BftIB#=K zjohfZzyt}T8+AKdH!w5)_Q415$LUZn$F^t&7si*2K?&bXRPjDk>P2snX8#jy$*%d* zxS!m@cJjlu-{=ggcJZb_BLBN z3yw)DxOb^Q-O*3A$8$A@ZAr&H>h$=JQS7)ZgdE5+kF>-H6_k_+j!Hay*c7I!(+?cX zD`)&f=>)Kzi6D%^Xf@~YdO7X~lXsBJZxZURU;K*Cr5Tmpqq~FSM>{Vnt=rd!M-zd$ z++tkvyHgqZ8kCQ`Z1RNPYh#>kq1c(#TK4Q;U1MKmy@6n1Z8z_fqFVU(x;}eVq%$X% zGaI$1TlndbA7DPsL#w_|-y?aAYW_XvqY_5DFjTgMnvSCaO_U&VYntCS>bjVV;+PAM zxhUWVRe$s8@lu__QGg3(tKfcTMA5bI+DKrgpL5D?iG^|KnL>N56tUc9{cw$e zy&`n=dTl-BB&`dWu1qOfs@nd}^tZ$K^B*nHx!h94_>dYDJB*EwD+>O64cd5FLnlD8 zMKx3Hqicw1y73?Yy9e<0wf}sWoh<_y(e=8;^V~u1Tb6!IaW1>VF0DJU*V4JvwY@sOmCxCDgmkshzDKQz zfI=$A7`!vjFIUNFqgz$qLo;tiAF7;MlVe_rT1059i~x(U9cuadWcx+wt=s_lki0WX zuJzxXxY3h5Qmyi-WsWTR`a`~X6--eu9_sCZ-jVc8%actTZggM>qIJmF4LAb>!^&$U zySxTUD^Qgod4jZcrGpz6JmhkLx&O&SoFD74M>2M%21UaAJGf+! z)HTG7t{YbG7%-3OI^O-a?uGp{6F(XB;NW0!mDWqU&dv(wyum#5th_@cP~bT#A9Qa_19A;iwG)M5OJ*L;pJ$t5Q=p~hzCBD{-8vW^$e0m;XNJD|`;9+{5QgA-#1ETxQu8r�oBi*BCmzcD{7k zUWOKDaBew9x;Si<6pTaDS;e=JUx)BB&)+52p1^P3Ici7aH6rRo$6*q@>_Q z8lpffF_ZUi#LaIii*G0sy>5oF5oAHlEt=1vACZPU=>F0c<53uY>cb$og_5H{6U>$7 zdd1^8sTDwv)PR~WU*uAhTnh6Z*QJJ4n@*y%qgD4{e~%|lhl~yn>$2;#h{w{j`%+ou zd>MU)!KRlD>N=P#e=zOg{_NQe&78a6apt4ZHaK}G4*vU7Y}BXs>LhsKm@sF#Ef|{l z7sj?x2vzL?Wx(t)bo6{lLAzu$`tD*P!slKvom@Z#hh8Ym-JSmNggEM#?O3hv#zJsj z7`!)IduQ!B@l6skmQ+fC;vroHkB%w+Dw-2bz%5kNGc8qeLd`CA_H7|uI8nc@ZHDkA zDYPx+H5IC5Zte7~(9Fj!VYlP_xt)?spb9TcrlzO4_M*sfwFvw8Zu5%>JDa{>nzvro zpzjMm_xR*0p$SDeD>Fsk2qMd03cH}~426d(M0_uY_75Desn*Y7NX;`eZ=R@GDZr6# z-j%<+xb`=64>u5JFD#X>wQ5l|3eF}+=fRS7!%(!=m1|0FAM?_CJZ z(ONaHO1^KS%U97k(qnb_oI>x-PjkPvX3`-`5@OL7Cu)tIl+a;T@L7C_^i2yKg|F2&7@lE!yyFYp z2Lgr{Gt$J**WV`BsxC{N+Ei5?2;r$gehWniqN);PuaqPFD=iu#R%D=S|y>JI+UO_Kgwv;Mm3H0JR&6;O_LQj!anPB{BN;8 z@UwGkWXGcnj5Mhq_#XX9LIl{5$JzQwV!&Z}nEFE1;a*i$etw6b5g?HENA_DrB-~

|qZ_4lyGmEjp%Us^@^ixRetsTnuY=_{D412tW9iPu~Q=wUeMb z&^6`+FC1G!q+tv8#H+y!kSxNs9^#n;AeSIo{hjH$PiJ8aGhyJ<4 z$k-sa1^v9}MFID50UZy_hMdk#5`3Dw+ieH^;?XfN-@8Q^b8qB2nyz~F>gc+d(M~vH zero(g2Jm7PSA0i<;E2 zKhnaPY^bnsRAy%8)(y8g8Jo4TGBj=0363gBf0ff0MO?A?MRBdS>uO2){{XL$ARU?( zb_lNdgHiJ$XJjcd;F18R?>o)GXD{IJ{JU&<6X)Ol-K_e<4e>m0*k^LmS^ZdDUO{v4 z42HjKnv#q3?uZ}G#vhn;k_AzVZ1%UgZ;J1OvcIvnT>PTk43>X^*H-j#Tuv{8&51FAna= z*cU1>K!4Fd&Z5XYv~O&4>fxZR^xlLYBN#V$xuY@5m{*BDNj{PMgwZGqH4T-$xSw&2(TSBrrj{ zCc)Qxwj#2%2Mt`BGrIX)tOR@~@B9_xSe#iWL(Eu5y958^@QDm*FaS{5zA}ii4Zg{P zpIxyzkWwfcQfoUnmTVO7=FqZwm?FhCa<#;wcR{o6?V*^848BUJRx;KPn9J%F!9%>x&H?_jwhW}N@;1MRv}d|GJfBaOLV7$UDL0)ZVF{ua7cU|a!>y+KG>C} zYss`y&=rb*U5`EgKZqBh19kv5x3$V~Rjg-J?Pdh4@a$pT+bXKf%|g^=0vKFGpagxxWA`;syHeWT1Zn>9LV>&wHH?vWcg)+K24x-(1?{pI$n7T?b}>rH7VNqAi4-evQ_z=wiBcMc02ksM>O=Dkd^{ z8v%9(1p>i0k4c1%;^CAr*BeSp+p@SCD(ilj=CqM{Rh@gu+^|?(KMAf42b%|zG4N;; zi@wy_?LVFnz@fhW9+-{*%aP4CE&`}a`;J2vrY{K7*C4i;=d6-)M_70_QG7R%_TZNf zue~26o8AhO-9LCu&TB$!1$o^M$a(h7Tr#SbY^VF>PAD1luDxbJaaNB(NO@FA@83r` z;3ZAkU^eBX=TC!GQC7P6A*Rm!IC-8ixW}fE2PkZB%g)TKZ7wJ%Sbo{&GQ3r}RhJPF zv-qmI8`2|C99#P9ZK1V=xkad~%>y98hzvX}zIFRa@K8+gd&T^4GVs*7a>2i{ZJbyw z!`$cKj0(7d;yaN+7%o^q@i-f0!>C&8g^6q#E}*IW;>jkbE%~m2Kv&sst$<{ zZ`RvPFYni2JXTaysh{!>^B**N9Ecyrf6b{{&q`4Z$WBgd&-@*l7<*N7)P0Fzyq5LO zF6bwb43A>fqt=*faR@5bqpyX=%G~@||TcJc3jj{z#R_G&qQ)4!d`U`#oF3TfQC?TyUNaqX3;=WcqgtFiHx*#Xz|t6R7Gl83wHs^lj9L( z9~GI8i>0Kdx{d(Q?IG)7@n$4nFp@)YWhlc}1|8n5#hrkIep{HOpV@$pbDa&-*4Gbm zU-cRGSmZnFXP^c&WQG)>>@^{sp4Zb9_){m62s1-~cz(e9)Wc4b>lo&FKdFxH{TSom zxm1FU?22~!c3R+-R*h*|v%d;5W`9QEPKLcH=f8_X>#-%$yHz-}_RV8Ne)F4D4P`tqDTnl!*z?<`bW&% z1~^8om-}CxtPxIVgSdX1FSyt_bU=DD;$a>NKHD|lf8(wa`$4qEqW34u!UDF4_scp8 z&#!DVsd(L=m?(d-618F@q`LgB8j4qLZNWsIIzlm3<+>f@zDRsHI{Ia)r2!3G%i*qE zkjD-cSB3twt@@#Sua#AqoYz=iU&-%)~$R%uWk zKGfsj>u%Qn!N3>JB?T|k$hIdVu(4Z!qQ%=5xH~6{m2MGXMirH=TU*?@V5AfopC=pl zlk$Z_^{Tx5)y9#Xuc<**rDo>0#^9=M?wb4^n{^Bxw&(JKC-MVyezI|4<7DU~y~Bm? 
zfIiWBaoydV$#-!tV8axbup|b$J2l%}k#w?88QOc!HLPcY^#|g{RqzsTBPS#08G>5i zaf3bU5u?5ul=GETy29K7WoFMa^K`Bt?vYZkYR?{K3eZ{tz{GLz>*1#6<+<ZK%?t!(>RO{FHD`|+|O!x$( zy~fWWogivbcq%dQyv{M32jI*=?-%MRJC9UPtvOF7M%VAd>g>NUT5?Bc1F06Y&2gq)eDI* ze7@$04^*jiPl@usUEU72<1;$O52jLkH0SzZg&J0#K33HuU+iIh>;ir z=JZ_)+*091=5nCQuqAc(A@zKXXSRE$gx&`Tea`qdW|~hu6qabyBo3tpexg1qW*2?1 zo*LQ=$&QbXrER}6E6-DGCf`b^vJ#wo-QeS@%mEr$m4a*jcMT9db-R8=_4zk`1c zk~gpWfZ)&zIV{BmB{wQ&PR(7vN~a{lLmL&sYxL&-_c$xWe6%QvgXbpxnj(;x7mknU zDtCkUMo|xaAl}M)<##)_G3LsC0Z!;}dU!B2ztGnpOhNPM@7>$cBp7&W#d>Exh_?nEu{nE;4vi_E=P|6Cc?O9QJTNG->^`UhTZYe8CjyTXzsy8H0XY)?$Bupa_Xm!@kydX@*RSTd(O=-y%mR z+X!Ncn~1!;U8}(BX%S(6&^=c5*Fyk?mCP+%a)l+DBlQ;@GDidL2Zi?3=Be2$ZNu;{5~`8Q zgnY4kafc=^{II#7Ug@^4o6V#X$sk#dtuXd&3!!vZUjQAwKW;eb(>d0bi5xw!xTnVr z>3C>pgzl{rqwwUxJGL=3#K6Xs3-Nt&zp54!XR6gcc<1AD@c>hHw}2va-oGM_k{@a{ zmVbwSHaIJ9if~bq=f7!T4DrWvIG>yIhP4NeIdG216Sv*~h#O#6iP8nfh2d_e0izyo zvkjHRSClyRXO>mw$WDTm3ieTU_^=#9t zX-KO1ZbAxS#CKkFNNW7G(w|h^e+~F6Xguyj)zng`#XK{jh6q?2j&NRrR$MOLkZR+7C>KoZ!TEQ9EWlh?A#q?+Eo9>#ez|<{YLSaCe_N-@4RjM>xzf#-@nUS`~V4OFlVvh z?UkUP>YAE+zkI~+ii*aHj693StL%n8V3%Lze*#AV{59kR^3JF@zw0X5IU=GX?Ci z*lv3eznkm}alD1-$ldyR%S`QEwLq72uRZBK>pQA*wwbMgd;)D8@GiYym_~US~(wt2Ze%_YSIzDEy#~a9VKcv^^sAuxLLU8k$pHhW>cN z1Y@SVHz1V+cl#EA?)~I$kVJEZx@Ess?$V{rf39?KHSbO#2j9_rCXLUz@^magI>RKh zwe<=wzCN{M=9ERI>VqSZVnal4-jWY*M%|EJT@-M_2Btifz{rEP;vI475AnXb*nUitWxMs?u05xa@i9ZhMKkMBT0X*+lMK zR0TQjZmS8w*VlF!Pg;n*9%MiG8=L*TpqqqfOx1osgv~_)`g2=D8D4wmIj3s~TXSw$ zO6xI({xiPqyEs^L#18;<9Juwv;H_-5rrq8e4mQ__O|SLE!ukg-;!^g8mGD0RORM?e zTV{VbHz0Th$;|TfB^@%GTU#_ThXP3lPs!^?(l=YLmrXX8W~?}T?FJt-KOg;i>z#kM zI1>{K1ns@!SkreTM-_lN?J2VjT~tqc_nn4~AidhY%#g~*!HI)5!SrtGme~>j*OCs| z@yU$7Md(w{xIqpQK@NJ^jp#2j4VYTJNebFkU;h1;OG%L|x01MS!4K{mog}$t{DI6F z#W)T$fq~JhSpM*}q^pS6!p3|<^W?X*5A%~Bi|$v6e`0_=YtUF2EE(g5!5apCJG;L+ z2%aY{(z%V~mKQ?;e*3{e#_i0nZ4fkGYJ`tUP07*sK%7-U(Ydpu#V4a~Q@#TMg-&el zz9RAM+qdz%U8xp;4aQ&J%WI%PJFtTR$pVTM*3Re%f;F!)xw;50YL6tHeXq{xgXN-v z2-sFjyXvJnMO8z}vOQ$p!+htZt~A>Kt~9J&|7|||c%KNX+=|QU&L>%E{gr6cL!;}) 
ztC=RRH+t^*z2}oz_r&(TF^t?jXwsBEIL@&A;6nttuP17om2FWX3$nJR7&{XIzoTW_ z!UqPpzez32QuUbE9NERDbPqR24m783)gi8ae*e&R(v@Ui^)czn0shn--%$@(Zc;2_+sCaQ;8~V`YVOH`nC0+l(fh8 z|M22ORp)XQ(Pg_JeOffv{ui!Hf|a6PoxPzE%uoZZ$6b5^S~1tP7J;_5wqpBr5nnm+ z?fSXA2pg)xfq{NR=}vJNJBzMRICaOo?203cp?m%yBeDKwAmW#osGx4?Us8EJs(PlcHZ-D=*Pq7yzl1kj8OTR~4lSxikj<(^)p zfFgoz+?3+_Va2ByX;R>xbRvBfHpj7cVK=hv?%H=|E^7^OJgq7-{@z89jU3sL-ri_Y zT;H_Hl700{)P=0RIWnlpjhF7&%h^ef?xOm4R`TkR%WATZ>K$msA>Q}&G7nhZ-CZpB z#VwDzeWWwH@}OA1joCc0thu?F>V;PgF*Xw5#SojnweQIrD7DtX|K3vngAzFQKL!Ux zF!!+WEiiH-H^}rQ%Q;kMu|Y6nq!IGU#6MU3j=)vI2ylSA=I5)xJjI+X!MOe&BkkTH z4!recfH*- z9iNY_=`IYsyojl zh3TdRTigO#x3OMCFWI+-g9FIW8ZrsC&RrFaVB zk z((R9W<6p`$ytc?Y2}stKYxo~2(Q)JAgc5rd7;)hnn56x36cfbtq|p!;fQ{Qghis{(9E&a3HJJEwzXHbui2 zN+y^Jc(M9xzAAlrWMv?XJJR~71Aqhm~1#^N?-Jh|i!H0S=h9hZG}ETG>Oy?P%_R?6>u z*RatH!YtN5oJ$EZeH+%BcTgI8f9c5TQg9+EZAAPtQsk+irkT2uo)j1^gxg4$6g?h9an&!1Cj!O;@v6mzc zJl#5#1GZ2ZQ4%)4W3BT5>bl-~s4K+(4+9V6_!AIBAei}(cIO>nrkqs9L3@g6-L%WN z*?TonYO~K&8~R1|nW$5kiSEZ$gyZ)%to5|8Pk%R96eZ!ejD5Bz-8+n{^};rt|2K-` z4>&Ig&4nu&b_kx=&>Q*w6#ROZ^dVZ?%;>=G&H&7t--hax)Il$uvluQT^ewX$aOF{0 z(rD}!V&o>oiMWddJ>iHC%AQA-?6M#9z_UNWzgZCX@_VQyHni-_vk-@t{6yU zsp#1OmRupkip|UA38$q&A2V$iI_b=?K+;asLG825tU%J9Q?L*+z(_b-Mm2I%O4IRa zD9y_z%jok<$OkOkAs3OcIyC=@%m^;s*~u>Vamg?P5&vkZb^&Td8n59&3r%FWNYav9aWN& z7uJfbja)u{Nh6LqQ|viTR9shQv=Ih&c8Ru)k-Std&K-#$2>P*X)-K7+KL`$7J2wEC zH=n!NWrnoc3&Wl?07d$pME(8th`@^yY|C$o`S=}6!d)GT63q%I_Y%CJ8ITV7at@Gk z&#;&mJAMm@!%ezA=HYM!J2 zxP?Huj;$E4beh#kXN9&?j3q~@jVZ%>?5u%kbXZd*3pBW<5^a|VE5WvQtuZednBw4zwoKdl9^FC zN6)N)-}2$CM$BPqx9)tPGuOu-IIagim^&sTx=U^-2M#;n@y#m8_Vvg2@^22jT8 z7p7Kc34yjhQHcBX3rQ2nIMi2)CFiXl+pO0G#h+) z|G4JkauP5t&_@~fu6RMmL($I<`xCoi?v__G;`A476Ad#%0*g9MtH1L@x)68ixn->g zdQqoZG&Z7Ne+v~3-(eifgJXE$yT7S8a5NNl;E?adKO3jg!Y0rFqZhAk*1yG^H;kGs z=FSrnhRG6SX9OQb_2;F*@1@oH@D?(Px=}};6|dF;jOz~;9EN0P=^)yS)8w6U zF9q|LN4R8LLMYF5JfCv84AvggorwJq;kR!- z+7<_VRU@WmPdu%O>m}72wgqeBku2KUGZsm=OZ1t1Cw__!zZ_z)@`W<3Mc?XkxEOHu 
z$VMTuK@F*WfX-Kl27Rt&3P4XT4)2I=!UP&9r|aX@u27qO`p<9os;==G3ch-`w>$aL z3KDC;L^-&lcKF10pDQ}5Odn}1#9IpZN8^+M=!K_rA;SC?7A^^}!C?Tid#`6iV|niR z1bx2sS63;?{+cMgn~*U#{UnH`{C%32MD$)Iv^W_0cm9DEc6>s%|E;@v3mALt+TJ^e z9^68!-xp|ao2{9LBiwUu`t$|HLX#jKSWGe$NHP{IGf04NCz6qt;xCP=%q@HjJ8~Kz zt;qo-&Z5yQ0;CR}2d|EG87w)VLDLa?l)kp@l33oR5DY3!jD?9u--o%GD=1oI>71T7 z9V~OV!&dy5cw0L|mXUOMt9|xR3D-ZM+vnwk z%zsifa@Yg;xAztz9M3ka-yLsZPIxwp~mL``(Jo zajTf#O?_~DnqOJy`uJUKEo#rQ{jjq|e^daC%Nxz$-BYi!$Ld%7=Qmg5N~Hh7vdV%n zZJA62Fs4xR*N;2pa3pnKtasSWTsro6A4jRvM{saF8FB-FW3&ko6lqTVABeRFPAH=p3P*GJ+R7Z48h zY-jHE4LYFW)e5e&112JXuQjg&bqe)Wzq%+f@T<{ANZ_*!H;qZXz|Q)}U=jXBJ1E^X zl!3M$Fsb-=(k~vJHaKEXT2Y1*7=gPUU%k;1H|U^S)ChfEWRs~5mahKM$y09eEUQ)X zSX{kNv1HUrnQC;o>UEhjo^#TOI|Zn$T)RSAACN>ol0#2MaKthNCbuAXppXVcDsCw^ z`pD+r>tla;Yj9oZl-f|tDJ;lcjT3zL?if7a>|HH6u`;$u0|x)7VPZ9%C9rdj=;)BE zt$_8o((6jK$c=l)!a|dc5e3frkr0f1`p$Dn+s7yd)m+L5(TCe)Dp_1N`K^?v-Ilim7E%V7A?a7PsevBepuGzDxA0Wb2o_|(<32Qye-LMgj9qQe`X;M+VT?V@(^3@k z5&ES6Q4m6`4FD8qD|g7d>9{j@UkiE-b=3xLvLAUiz+{&L zvU9qnNG9O{4oLq|QU!)JPd;HjBR75xr0)M3TyJ?SR!q8PdEG8a79bc<9iUD8?KhuU z)^p{`F~A_aNdmgj$p|Ub5Iepr_0kpw(o|;Q`ja+DsmLRh1Tz$__lc4>*4vT3 zDX{dDuup)udz)1DvS<%$gX!c?ZtjQ2a<_;JE+yEed^QXaNYojPSBx&jXOJ<5n?Fp- zKZ|3+--om-Jj-GF@9}q4PXbiI$&wgXO^17DXprn@v0CcFW4I@GP5luEYn%#Ax?Z)k ziXsfhYaHq}IM7;Y4f{fDBZsu5A1p(G(y)LeBP5@|6S?C6?*{O5`1j1bp~Vm^S%EjH zmO7YqU>tGu*o2@zsq?BPWp{$!5CWZV94|e(#|L+fE%vH1^fyMnPs|$adfb&ddI!26 z4Z!!f-F;d52xO`9JQu_3MCaRzbKd(2EJp^BHbtX;2pu0S={uzLl|*phGP^kBX>pwy=yaIchX=3jwP1l&E1LMC#zq37sUm|1wi{pcM9#J zw_!jekwjzf z4SdzrFlmcMf(i`$!j$27O|>eG(nL5g+xVFzmLjT7#*0nU&SBLK*ivJpws7weaK>Fw z8)c*Qb8z(#I6egI4~QC)Hw~!8_}HE7;^nfJZTp5ca$@1kj+xBCnmlpy%*7LW2hy0& zsz^w;8+dZt zb~3sdOMT+CZ;>W!XcmKc*V;o+E$ZJuH#_F9v9I}n8Ax4Dd^F~l{T@T{#KO#siu9qC#(tdXig&@Ztz4XZ zazr^>-;a2!Mt$^xIo0mSg@2B06(<(!U(_i$(MS+&F(bfn9TYECYz&z3hiclSS&0=- z1WbpBsN&hk;dkaV_B5Cpp?g}i?j4=d4;%1a;i09+aPPPIxI>Pu5-FbW89qRswJiwi zP!W44WzqPxh|xxF*dhDZ4~;ddbZQ$!;s3Gq-0@KV|CcB&Dy!_WLPln`Qpv9Dt!(Fz 
zovp%0h3ssy5@$QkE|u(+dG_A2PWJD0bf@e4dp!E%{djlY@AvDu_fseP>_k6Nnd1kC5NXx57`pL;8rbKn z-9n)UAAcT(gRfFe>ay5$7_70N!d%1<@G!zZOC&$|p?%V3ZaaH+wT5=@^4n2CRX zG7d}P=nK%80LpO$7c>E3cQ-Zb@E4C~+_C+rCYt-Wd{>99c>LYo88Y{Rn<>23Js46M zy!b1k&}s6H(h+e3xDJ1i1Vrp%g#zL*s6V723hZP#;R1q>R&`X(Ls09t?2jq|(a1WfZczHzzdvLMvEPTLLra^~ z-gkggMzk&=n1DPisj`UF=1o5SNv@VZZfc5+S7slSn^=qX-Eo!Q3`A zt(;rfwi@J>kcgL|b&f)!g(8}h0VE=?QWnngnx2H-{f_<9R$?Hnce&-ZumRQst$77a zgB$Y$tA|7g1@aUt{w$KUeV=i^uKPLuT@tj(T7{7vf!`jT7Bl=wc~p&yMy=X`wf8>Z zO@GL#sVU}$iOfOaL;5=o!~|Ps3rmW1{NmK&oX8Q}&=u1Yg)@AZI|n45b@r}R9SFBr z4;4E%@VlZE1NFlv>VU*!7@ssXz;Dl!S9}x%rF#FI|H2>qN$!I>^Ys>;8Ei53!Gm=4 z47$JeD7caX5_TxyMazMO`#%;yMe>C8WO(JJ%dDMct~(ky4lDzn`sVMpU~#io2aiB= z@WDmleR^9tJqf{WUF#tV1e2+>bdF7jYj+5H`E07krH?AJ6=MI0%!_~l0=Xf$&k<$^ zOf|F*uP6rdF2^HoFe!{$fb#w}P|p$<7mg~7Am`P$B7_&6>ea=DX|n0LZGF1Sk2--4 zzGzDe5EsH+$=Ct9Fd?W^^{a0VGQ-qXR8#Z?>MkZ4r%&24I$-75d)4$W*r)8hf4ke2X)>YJrodTD~-21 zD9ZMYJX4q4DMvdVA@cc&C!x^`s82`3ZkuVvk>f`SuyQ*}W(BFHEO9JqC?DL$>#c>;2(@qTb{-{tZeIc>c1kZFIp%W?2)E=l=<3VpzXICXSd>A7 zG25|;jmO?scPWyw0t>aJ2Sw+VoQb|W)%N@qbO%X zxqCevLw||;Kss-E5(QwC-XTQ_aDYZSl;Y$V6qG{7RC<%RK2Dt84v3GXh{N#ZIX^GK zu+5TQl5;^mieA4?{n0k(-h3knwhdcUQv!4imxeE>ZPSCy<9lGc_+ zR&4iCx-HljZz%%dw3H_AhrNRqTn6cA5=SS8I5)B<*r-wA)#G<7%H-s5C|dx$v*=3C z0?wa;+ze>tw%v8%QHcLo#~7tN6H`<8e)IKM&vNBVaUiBvfN-r!ax%_rpZGiBk)%W; zx|$iCT~L+XB>v|2DW<&7yn^N7mCOy9T$oi7mZw`Cy?3Tw!v#O$oE8u^)j_nC=6Oj3 zP-s>S8EbVT4>AVZ##nHA(V?K%3&NX=#O14;GxKrLjSdScSD%}4)Ts*3pY=A%sx06T zy4ur45tlyHFwdCnt4=l*mH-x@ML3>nmn+ zO$iPfkIh<8%C%V3o9x7Nbvf*^&`$-!@<@C}`qEZaw+F z7W{{!xi154{PJedeXJRKz|lwwjo9c$-5rf+QT<*k z_rP?97AN&nSQa{Yn|_tn{(`UJIh;S1G!QuemGiE@{#>zh^(1^j!*M;jPQKONJR?)L z?G^?t)ww|3oNth)V!_-w0Hcnk!7~dUr!OG>9pjuy1b~0fG^&%XerXSce3d4CumG2zS}4D#?{?~pjLeJ zr;63QpK(Z||61F~GKu?7P2D4tS07+zFHS@ZTMmv!kvI9-`MCP|H>Ea9KxkE3ywQGE zhC0m{rIT29`ZF9QQ^t!qm!vL`Tz|=ggO8Eo1ZI6pP|(B$TF@`Y(vI{qf84SYRcV`5 zERyMj1?fvLjzxg5zzi42prpaaf4LtWyaU*WKCO)&hkX(q@U~6TYz0B|mRJn{Ib&`5 zi56$KD+o#9hz(i*XjDllR#_Y%cnt>pZ&A?4Ft;}nEgvEjq9;aOhmQn#SqrYes@iP; 
zDW5du48X;$783})N0oJ=%YH%gpe>u&^IHdL`aG7hkIGmup8&-o0uMXPxv_W97oc1N zn5&ONhU!7drhZ2Vyg#J~PsT+3sqLgZ#Cp-{=*N_RBqqkB#puA6h=Z`=Uw?EQZ-Ep<`NGR~A76c*%NA;+n94%@rgCw* z?s*&QA30v_b;fx^D3Jt$Zsa?9WiBteP(q;Kw#BZAAB0C{@of>WI`EQ^LQiogL(qWt zEv%Z*O$lK?y5AdZ+Rc#LVJA$d5;pb86A4ggMkGFeRP1Lr`hMe%~J5JazEP#1?}4 zTjl$^1G}JoTy%TBK!3AdJ^uM*RKng&LVQ$|xp2Jmdbj6pw^F?5m7M05mi<=Z-Bz%g zDJDK61oMys^lEYDdYa76kW5JgLwDTrfZB)Hoe#1@-nyMMhd0xs0XF=8v-)a?wDNH3 zE=xIq{mz$cfD7ZIc2^>+b~al)_tv}h^{vHaLcq)o{)Kwa-JE^?=6>gywV96ny`lYW z&n++@V*|8ZLsi(0P6~)bavL+$3p64Muk2Zju&h!I_H`3D4VG3v$na=dHuQO6pgr){ z1d$>ze+S-WSKzeW*Ify}2zq$a-n+Z3Y8P+Ls#lfLnrEu8fwdwV=@Pp$b#<#jlY3t% zDt|?KZbn+iO$=cVfWBzG?`@k=$bB;cGCYVj8yLkL(PqQH4Pl*tFk$2a{k!Ng@x5X2 zk88J`Yrqi+jV$}1mI=npxi4E}sBAgyFAh~~R4loJ{^nKd{e>zHeP=Dn@eJgi0}<`(fXitWexqWwjdAt%f5)Zn|-}-nU9JIYWjf)kL#~5IwI3-dIM7=nN8{D<9S`z z5g&?Z6g{5h+)X&B2~A@u{AA1WZ06LEh8U{70XFNu{WotVQs7_vwlCO{9j~X`Cb~kNYW>U+ ze$Zm%3h+LFj!M0=a+hO4eom|RMKwC>2yAKXlGco)-yRk{!MfsH538gdoQ6G~ zBo>Qzl?GGW#yp z*FX4tdwCFh(g00%eC`jJ>jD+3)2L+c%l6s<#ewT4%MR~+XO)Pf_zfG{f`O?ZM#%$& zxx1rWd!t(;>Ei1d#O~8la}#A?7x<502Cz_IYhh?_0X+&YSZpOgaPYs#w6rvjHEB=# zF?Z4g%0dgzI55y{l54&zN@96if2$!7_?fZGm2OUqc6DS(Y~2o4e-rqwL-#q$S~0r708;}QfML1=Bc$*8L2xN>in7FsLwzK-0nhv~v1c0B4F`l^G}f}Z zd>1hN?O0VqOu*^z%z=bA6cs1>o@3)BvrkeVTkoye;sjdn0Ugfq!X9gT9ndW3H9ib3 zg{~*f{<^)k@}G3W`XEOy&R`<5aLGM}+nI}hSD~V*%iuDu=Wnvo07jDK$3ew-xr8x( z|7Cjc&BvqVrT$KgftcojeOUDT(>u{ZHBUv2MO8l<@&^T<;i<_qR=Rdr*w&!^EZo7C zXnDhlwx~VaZa8fmxcz0Il(@GS)a?w!oZ`S*4jRDa@nY$2#c;yGLla4WMH`ARIdIW< zWF}Qg?Qf|O`!Fj>bfhYpMPRUE@D6wmC&AKPTs#TW|8ZvZ=|W`kPprk5UAw{x9leo$ zM20<;l6y7PsHJgi3BUoQ>AADOicbYLSgOM69)J*XO923Z`Os$mWg8G@WnjyZZ9 zVB+eB2fzy0SGkJFiO#g^Ve)=Di42;U;B>fi6mB@d{_c61P_7{#H;y$y->P>2tMWa| z-1YCkr-fC_##C+_F_V@S7S1%p9%U#D(8K*>p4uKoo)12}Bp*28MvJdN`fXDyI@7+# z8148wJU@voQn@S&Q2aSYnE{t9ISCIVBuc%0T_K$I_d+Ajq>S~(L8ZCe+puLs$Gqdt zj7yxHU-QVulKG+B!ikgUN{D)VnA2@N1SK%SypN+jhuqWr)HtV|jt1eRMxsw=D{i5K zB#tl%NPU%i z;dP(T5G+kpz`V%Y8(sI&lfjP*=xHz#=c{ve8x~IH-mEJ&*m8 
zgNtfyJdk!yZDjtmJ_~$*sH%0`^iUj_RndtQvBL2Y4WG{bwec;YWFTU`+v7{W=Xs(O_8` zTmV>&t{G*w{qbohRD_-Xc>gcYALYCaexZ5LGcW+#RdgULg>Qh+fvW6^b}v!R1Vp$$ z^NAH_-8{cM7!PD}#t=(IW(W{vX^tbb30pI_`Od>oDgUA}j;ebi*CU_nO=m-BHnU4Rx zVWPKD?bEsmako*({7-y7IInCi$?W%gu)uB^WKmYjWamWep@5%>~fQ&&IR~v-Im;;isAQXTAe69*%Gh z8e@!j27xzK@M>6yJ-o=2i!aa2!v3;oB2@d3ELcB!=Q zgYdJNfzBpp=Arroa#%}R;yaE-NaqSN^msPTf5x*p0xbqVAsUH88^^pA2tk8+b6gSH z1x5!+%(tI_%Ej~7db%8~&tp`8?>+iePCE@>%hDwNW7Z-ydZ( z&bkfr_W_Q z2sZ@RJ#ilS;M%=1Qmill$yI!9+Pynozv(HwdZEnT$f$-iv8EWfgXpZ^D-{$ca0f_ep#}Rl;IU+Co!y)M@mT^9>@OcnnAh z*K&Yt*l&)D$Jh#o%qsCGE6H2KxwdNqOYm*|QhERGLUZ_?uRZx2Z>PsV;TkB9ah}=K za-}tkbhktXGxT=-m4mZX8V(UGLYDa)wYwe42k$1(Q4>`khmr)l-UDvA-<`bh zaU0lk@+M`URW2G0*27YnOxOMkOO_5N7O|yC-1cZR3QnAq$05LhOYajdTU&1JSr`t` zcZ)ufGhFHmHD7(8?TBtw1qG|1UL#%0O!WZ|$a2PlET?IV@=^RER}ApO0s?_kVFV_D zX(sBhEyOj}B56w+Pv&q8RnT#kj6Sgz`UdlkR`e{cn?2qW=Oqxofv+vm(1XlA|;fGS9jl+vOH+VDtF{PB|g8*waq zzy84OA5e78?D(F_VUHg~!NOPN6Wlw>9*y}+;jh^63H-+6jg1!$Mx#;FM} z^pvCBy8b#)6saZ_dc$c-@7CL-Z;pjMkzIwnpz6-LuiygLLiFXTjQ4KRNshW#Ia1Yo z%gFt&gTfQ<@X>{-cIo1hHjcxTE3}V2{OFwmTtF;5Xa8$t09xC4lM|xThyzS_q={kt zLH8z_=gVq-yn8-}V1y_0qsqe=LAeL$)6sF8!E101(Mcc`*SSG`Bc&jwXBzcaIhYlT z0iwLW=jKsY2|gTT4&Y3jEI4G3Ho-u#)q~jgWepV&#OvF)+;XGA*ey*qzM#T^{1Ve= z!mIdYZEbh*_V~X&eeCRc6H)XK2@0~zKsDc>Gbb1qq~5|IB=o3O7q^t#QZ8|i(@{IJ z3>#qhL%%&}VqpGZKQi01O%_uPcT1+N9Imt&A?bW-*$0^)GeYddZRf;gtE_M-j2ph| zkOAd85-J~Q9zeoD4aZxfa6m6`J0HJ2Pt+oTXkcdL{(iQatG-N^E_~v*tYxmUh+;_! 
zn0|ti)^<5vm@27l}MU{ep-*&8xnKV02bUy);G%kJhUGxP0rWj^z& z_ormjY)7ns!V-IHk6BiAy(B&LIg}uQekW+AHxH!`k0p)^j}HyPGI!C9`@R zluW-aINIKoNP2ub%AIvPpX0CnFNfQK!dP2}`ko?J)8um{jefv`=O{V}6mE2!D|o@7 z+uK!|RF-Lzq98KfWLcovPIeAdgt6mAO?YRtJ|!6l(_Y!}H!v-g39b?F3HFGo$jcF%cYMu%!J#wMTluL1 zHY``b9zJ|i&Dk~vp)Pt^hvHm0&#fYH?jkcQnT3x(hRh8W($h|;E6B6Vo}=_5q-E|g zX@7hK=j+lyh6`EDAMQUpT6rkxgBLxJ`sy`YmNRZ&#t_b-Tk<}jaAw4A6MB~5rFJB0 zt*9qKC9=wx-b#=)x2chBB4YmR$uMo(Rh2X+Hr^qVE*Jj!SpPIsTQ$N^c{{qMYb%3= zSi|_t(Y7VRF!WNvb9|1sk3ey7RejvMGDYlnidSpwD?cLYl>4fd34~^-_smlhbX-@6 z65v_IOq5NY8P(C>fWf_#pTb)51QdbQ14jDf6KNFPh056-TQqjMXNu=4RkP`ke-cYo z$Gk_umP<10ZxMfe2R(Y#<1N-)#*1%!=k*)8qHIei}uPjFII?q9QMUZ7f>>}K0Sn$g@`WVTL`TN8!H+Hi1ia69o2+P~h> zc&4Xi#{1us9`lYqy_Py3inOXH4b39{N0cAzhCke3 zS?z}B)8adYj9&lia}JJ7Kb$CO&GsZy;W<%wNE?a7>%t0;8_pdnBapLmB1l1dr|^#8 zR|6?;Wwl;(Aei*8#A=Cz`*V`pQppD`^lTRJQ@C$B>@Uq`(z=wqt;su!rCv#fv!voz zt&A>i1s)O9!Ifkaewh9ve{!hbc3%qAhl*m|62U){NXrvBRPvC!1d(2r^G)2G%d_DN zC%D+?XDfc=zbnfORGL)#GvS4V9%s18P&vRz{D4-ByM=F*A zMQRN1_v7p|lSI^N)aV}Q;Cj7a5ij$jLo310Bn;wEG z(rssT-=+bIrlV<;D_Zv!L5iFUwhqr7~!fWOEqqn=C4*@kbv49AdxPFpM z;UGO{HNQA-4RPqXY3>;oSF@h*-kXA09(IO6zv$`aRx=8I?V6k5!gC!26PkYXOO`%^ zJupPYum9RLEP5R*HhTH;ekkTc{=D6d1pv4Vv9T?DOpfnzQtVR!gXqMWPdlT zwpnPXS>qeA2YaPL)J(OBzMMrv{vb@wHH=vK;#Z#yaqi!vh~mM#t$n?{MTqs);_)wP z<*za9@f*E6cggQP(X}IhqpzGLC6_<#$oodhb6#4>xLsSxebIUDZfLvKBpv$|qN;pP zbGbs{M>iFCAXd#19}OX48SPtF6twR-NN;j?axr~xq`L?yu)K|6a+=?XvX!e`+}CyQ zlp;pz8DGT0H1Rh6Ti%9~GdBnRQb~t$I2z6`?0(FN~4eisn)F}S0^A4yzk`X9`1QWgSOfau#4NRkM^grfIft2GM+n! 
zjCKRra$iDJJ}hf?RF|Y97(TFiw=YaMKj&NgV*cxe42yDDMvs9u{AF}qdSyaoxwV~A zalRR6|6Ha=Jwrb^`KT&}BdR6P8%KE~)(&>^9l^7D4)68E_!L|JP4lm>KJ`l(!`x~B z;iym2N%NK}YIh+o1x!|{eB)PbV!XmviWqJb&^{H(__Y2*Mt_KG`jzu!m4&t)hE;_N z->zZ#-AkX~Ov1&jqT;o)xQ9A+Jo4;;8gu-~7CH{}{ZbV7sFJUWsc4!Iv_8!&D^upC zT2qf+=rdI#%krOl52R-J07D{d>!rP&9sQD!V<3M` zY;OcDN10ZWbbo%9phVuds)T#Wym0Rk{Z~uj`6wle{X8CxWye2n^h&_z(`WOH{uwAg za@sV|UZ>?%hg6?cG@F~hWCy&<0WM;j$iwc>wPIh$li6uyk_+Wg_7LNBb!55(r?eVe z{T&>95h;rmXvgt)SRVeC@AKSEk^bjUgwFR80q$j1CgmSq&9f-<1Tt+-!)8Op+aNxk zwiuDF-2wmftpC!iyb4E;Nm?+qLS$>ooftBC3%-kF#3toBv|M`<=e#V~jTYm><^4IY3voUZ=4dzRJO7zqD z!x=jh?eE?!V-6F~27XTulAIdb$GqbhUL_gOc*vww!=qaVSA%5Ja=k3Y3pVtZ`D({@ z{HqU)^TOa24K|?j0-Af10?!sj^$UpVJfWy#)VxYxhDAFq`jKtk$-VzP(t{k0^T7k0 ze~AM7&(hN10R+tnEbUz}Z>=@EsEB-Y#;J-Wh??#}d86+z%ba+LrP@|WwB9nyoniY2 z80z?=Be`jjy;@}q%FfrKp#?q^Wah5B8a9=m4po;{iLY=uWo93>2=DEQhpD_sMk(ex z;T;#7!@a410}}_qvuDNdsQ=MW$GqCCc~rpB z2~}J~ZXs9E&B~~jDj(7;0dEpNy)-EnM0e#%MlpS@G?tM+egJFY*+w7#vxNN^p7GhW zq7>3&1q+pi?Umo*pYh(*-^mbyzVyc9zbmES@*pOko-@}qPXG1yPwmI-Ops=!D^l-d z@@=Iou!2z(y~58<>i)}rG#%wVdI^@1z&f7oJs(np_ zomx~&je<_z=DA{*F>kx~5ltQW5JPE`Of9^Z zg)l^>{h|gN){NFcFXt?hlB)UJJ^-+z)Rin~;#+7;vr%SU%?Sh|BARIa2Wem-%n~xt zI$)eRRPl!Yy{n++nMKjZMj^4Md^<5D=Vc8XgiS}Ynalss3o-tdwlZALjiV#wLNwI) z^S1+au>f#t-qA7Kx@?ygH))7$S5&Ynka!XZ5PXij2rI_2(WYYk zlcYzOS7NMyMnZe$(KBy*xwE5lVb^n=yT9HEhBvtn08--C^VZTSVk1BQ-uq^HpUFyK zkjP_*e!Eqsd_UV+N~yJ2)k+OhbeyxKF5 zT>INbP$GN<3b5IFITwWC6fndh{xZb+fqU3QGbYLY4{|TOmzFGO<)djrjRwAjep~wx zKjG-;Vj-PolLCD%XQ7-E3G9R?MW%2e3d=bKCQF9G?cr6Ukiw|k|1dE6Ed}f?iSNOB zSfm`{;SG=mVILVW3^KhMfRcPsR^G-({10HfeJ@SRf0equ?N*e@Y);B2CFs$p3)|d8=|) z=_S+^-AKh|-!8Zkz9JQb@*L@Zg{=E2wC!n{YJ1PJ?la5q4o>!u3-HKnO%(^mW6yr} z%mqvBcYYNAh=B_>e$^{o@S3V!k)GJ45>JS6V%T9AI?06cb>g9QnqJHkJ}*yt_cv8 zRMr5`^CUM!y^RicY>MqwGuO34v4oCO6Om(|mH-aDI#b=wfh(Yn51j@vmp3R@;7Tmc zEJ9>BN>lv=TMdzo2($2wuUXKZx+thoNS2$;N30KKQ{~m6**q|0sHI`-9n4VcX4O`+vi zbB&fHYnx3yP@j2OLN8hu_Lv##^YH8EDk#|l7iR(HS7=VJXWOatg@NQg#lS<+MFWO2 
z9wHtq%M{e2_^N+UFBw398rm)J?B5S}F3Yhy{j=xim>)wxO6{pu_a=#wIhhUq*W_3- zIiLmCE+dw6=ilG$mO?;vSahruQuDB)DF2QI? z8;jYyk5?|0Wz+ETN>pui<`m9F^N+J|JZi0qnahI;mT$cAOK@(k(oHe_oA`%Sy_1!Q zQcn3bbuGDfcj@nUS=a?^Hw4j%uQcY(sQ4ur*hr~MtMiQP321YiJ0Acoou*abEbRK& zS_x+fc^w5I=bGzvs14+()C|^s8$ghzD2kt5Ci3q3K4-gkQgCvz`D$+I3m#D`rSRvr z>e8Z(9b)n<^@m^JEIIrwUH`IBY{M;gRQUdoIdX8=+fh)SWMcuDUYV_t_rZLU1Kcb0iVk}F)&F2QaYl|z& zUrRfb&N{>`G-xZ&EXx>&+m|lRcBTJ}HAf-aXls2eyOLG2bZ(GEeP$crD9jY09|Ls6sL*<~-u4=lGX4Q$t4^Hjf3T0*npOD0 z53iG*Mmh$JiEx-kAW_ciTQ@ zje8xCf4<}-so4xv25RRDzfH=>;^yxUiMaM+^HY#Cd)3t^HVfnfZ z{^}aEEn!n^dix@P> zq{!u0wbv-L>zdDZQf@^iDiSX}ths$i8+<8}pE6a0;~(+~^2;h+>?mg+g?9dS&?w%$ zojS({QAEKx2l8b%d}G+sx?Cd(b z_>=ku2(;?%nJl$FDGC(S0IvaH|z1f<^-~2s5|ckKM!CE{9(*j?Y(Mu{i_bZ(u8CyUZD-cLV( z`@;IieHj+;jt|&1iGHQsQnAt-x82|Mpih|!SYk!bCa?c<DO=Zj#q`uLA$boK$9VQk>oDmURrzQlpy|0~IBQp+J* z-$IxD5pkKLSh}X>S7bevW$LKP9mUj%LPzydMWzLnSw%H#b_$yH~&>#?SSMgAZO(Z1P;shD-f4|yy@I`UbOZ(&(%%J z6$*vWp0JP?$CkVjg#%pD647;Zw`xzIT8FHuPQ+RUr1z5pIp&9A%rpopm zfmM{zHzc1DBI9dJm+ZYh`%W4E@GSmLw(9{YnreQFQd5$0iybkK2528sz6l_plrWwm^Q40q%FX zkX0e&-*yae%eU~N!;9?aESg&LiBhAqwK+?43re#1Nh1dLEb#v0F?dN6*rUu$7>4XRUKV@KQwE`Y zOd=WIZGU;#Ze{rvW7+<(rzIH(r->$v(_uFgoarP)KWOUEh$NW5TIA}JPVLg!hWRx$ zg?SV>b;$68eA=(PkG&^N>xsnRAa)JEH^s+g>APmqC(A5j>ibsf@ZdJylC^!}t3Q{4 z<>or18&s00%a>FNj{a#*Zs8xvqDMhk%MhJT25o4HYrl~lsk`T8V*k7X5?-i3!zbvu zBaqs~$Np%RF?EQq`{&23U!DDy0mii5v33H&S|&`-%N$tjszhCGUOpEh!$a&(cKGz) zcO{Fd*DZCPq}?ysnm?=G8RmrcpAkCy#p?x3w67G|rQ5t_wc#Cg|{i0&rujEc{)O~GDNQLs$=&%{H3dbQx& zXR5WNd1NXBvGxslKaje(>*22i5X#*TU=_pqM29f?_*!zq*JCwlL)drH0lL*M7CojK zK87fIoZ6~#O(C_Tkn*FBOh@Rx)@WC>Fe!CZuL-j=wSq{`StQyW5;l`lJv?g3L;6a_ z&PsT#MNiVesY=(y@UCkF`=^1cF!a8i0avbgTbwqVR9l)1Z7i3RLlId5d#!2hQ`c2p zHR~4l?eIIw{(LY;*3Txp3{VRNyVW&`Q=Z>zX|l37$j|c=z|Pb$nm>AY7>lXS!GrGV zn&`dK6;3Cy^f>Df`NYq@6TAJcAo+YaYBs)O=^~ zx%xuc+(ms7zcJpJ#FXfDw+Au3VU|L{ zgw!GtRi$6~qdf)q%ifGaM{NAdTFMZht)H96q9RK^1aUW(7sA3D`gQ52Hmaa+ioVkkFx9!$sYFg)Z4`2A_W6z?ES8IKwp;a!%W;kV%y=Jc&EWlI;BwCVL0W%80*&d;q?WU!{ 
zyv`pGb0G&$7-FFtRTl4Qw&4vKa?j8gLFVoPjdRLZPq55=k~d*y zaSO=0trx@GZii27H1hT?8^O>E-d7TZ`T76Yr`X&$9C zaS)p~>>I!Nj#ulqq8^!h-)@YV#k81?it9=3H38_Gdr}IY>qvN~fbsO|O3Wc84WIH3 zR23Vmm6j%=YjHH`4lGkoKr9zRt$#@kP1n3V#)~Xpe)Dd>N5Hx;Mh(}DJ5Qfj7Eb2* z*yLVjPV_0qS(e4tSZ+uF#o5d%L<70G9170%Jk6B>`q<%PA&e`$G)6NwvdsbDCA8eJq zTsunipkp(?3ccdLx9!(EOcI3s`td5C%fjS6#da#+ds*L%u=gfo;oQpRyiNR8Hz0ai zH`E^0Gy=OOTx<{aP-Gt9Xc!^k=O=ARY#XyJj7c94%N!rbPH~QT?%Ji^vNiqVqE-I# zxY?v&G+)Rjvz3nBRGbrPeI2G6~`GLRE8wcO_LGr1? z_AlO-W77BY1Jui*E+#z%(jJ>45V45<1ZZ_!(k?Iw zFsx0yhAU%_`{74NOJ#)9)C7NCOX^psXBmP^co>bhNoYs}6N=c!vZ-~fG`=x)+f4d3 z?e7;c^HAY)goAF=>6Wh^%r#^}N>_N1FRo0P!e#7b7)7~-O#+_Uao@VhZ_M$D`8v2{KRs8RKOW;0r(srYJ?B}ovin9J^Z_mJySg;sx_VeR(- zDoE7h_D*>|TuQfzl9bbTWxOI%vR(bt-_N5+ba!-yZgY@<&TZkB%mOE5-qKFZZK^GN z=y{{1c7Ab@Av9s*8gy1ll=hZm6gaZ#>V0j%jrH8O@m^DrhX>Dd57XGQsE$B?s_xUOXnMS@*ozPV5 zDrni&(l+6V(%%`Sc(eHB=HV_W9H&m-F)yo8J z+b2n_fs5v>2%_IpzLxOMH8rY;;^s{1LMj|n!zR_sN_%m@K4j%)G)x&Bd{d`){_e>uRE5#>uy4+Iws@tf6xJI!vR>Gl1^k$HBASt@A zQFo6^tyjQGD-AK1m3V3E&1y0rSgp~fRhKgX=wl3p=Ee6;=B0J$;D~_4M+9L(kLs?gDI>#MADeTG`Pp5hVX9D>l^ZbC9g9$zuvyl*p7~)bQ=lSCs$=Ob z9ghs1vq09m$(Z8C2VG3z;E-uKT}?{C6n9KIre<~M{s(AN1w&$31U&3u>e66 z3MJtgMu->z&WE6r+|$hygH0@&N|O&E{0Z`#RQ$zRoE++Ga!iFPT5@yKAWRdY$F`nY z^d0GgNXly8;t^|^|JpJ=?H^LJPfYNVX1PBU`GavLm#gy-JHr=`$9YAi|JQ9@HVrh&B(9zR5`$(Tcd9ZqQnM&e;3TwtLSrINWJ$D9j%`E z8ku5$(|DMoC6G9^fQof64PorMhkE<@yx8iZnh!*zb`4CVuh4AJ1<6i!jm(;9vc>TLy zX~T3ooR%74g*0BOm?K$6_1k&6cNyzD`b9yrX}j(|6`uR{`}c2UC9!be=A5nv65e%a zv)QS%bl@M60%n$Y@$k()eLeY3lgq-Ymo=fPc&f6ufxT;sjV!Fhwqt5B?_OWnXuEYV z{D!`pV*K~^2Tir`)J%Q^M=ICm{mUQ%lp1hR`%u;nQyh=G7R_2qyda+F9HTGXeSO!! 
zBqmdOD`@|BP|K~cw^DvY0RaJ^$9o;cp1UNwox4$7a>qZ{gCU_ay?p&>JC?w^qL9z;^kNS8~TgCTJ zmo4WWe1#ck+bM@G=5$uTO+-%l-rquMHS%Nq{zyDw32KqOSK-z>Xg_o&qLZq{Bb-ZQ zu1YcZe!HikY|tM!9h0mQrEl#T>MY9Bw`<6PJ1)3bb==#yd(vb|NOO*z>$*Z}h-%+8 zXvb*|cWU#7Pv1U-&&|vb5vlMfkpT_C5pg!hw`nb$w~Sg;oDXb##}uqqot|E66csA> zKpM9!_D>ZnZWT%(>|7EY-&x`26bR{uAA75)XL4q|`hdvdtzUKqmf^!n=~LR}TQOx} zY3*6uy~xs?wIW$F^XxTKp?FtOJ#!kqmwKJ&;2Ekk-wP(fWZJu$lk#pFhZMq$Fo9<` z*2E6p)Zg&Z2wLj48sL=P@??o{AUvZ83DktqB(N{uWeYoxyQi5e9PAa z$WccXW*HLng=EtDW=8Yx$WNjO7{kQ^-{_d`jeD%71)=3ViC%BKC-6r15%MqVZUOYF*Li2&@pH~w)t0ku9=&rUjo@I}YbKH`yOpmg- zhcXATKGid_c7~}}AIFs%8D?81+JcfK9;#Y4ShpQ5Qb4P(GSymvxvssb=foj=CC5yGE8Uo#W=C%(6Y&$n;cYYu7rdOJGV5gpNVEArX? zblOH%1hFC#6cxkDpL$6}z$%3Ua*+dK8zWQzw@&MvpU?!)@AKWc;s3TIs@4 z!n&e;IYA)vijZCAJ?U#E{2Z~}!!K+7vea4s*p!lEg!V9flS$E%kytMAGCTb-`yqBQ zrk=_0;16;Cp!GaUE5a(nBt2@NtG}p;u1aCGCo3Gp@g9Z9D7Y7NP20XbAgvvRVjQ#g z43YL1cw{0(<>wEhBa$ZwrU=Yc3g>Sb`u-XIFE*^&|#~7y&&=);bYB z^IMb00(ud|DSIucD%Tj8A_~<$B^HLtif14bn!>}<37!g7T#{_8gU+;Fa=hWx0oUOB zN!qv=CxKX)p$H1szPXJtG>Zuy>%w5{Wm)3XKqmD$>059-{#XXG0j0#p&tr>@{zyoJ zi{4-_cV9$Id}^;Hh$iql{UM--&8M$hs@-R<>)+##?oB)4_|-I_pZs($S6>PXb70T} z&wtUdWK7Jp63zI@)F;&hofb;AlNOvb-jL3+8%S7ZT0?FLv%GP!E6 z>Z^v-0b7oB#Ar`0tM2q&)0Y|u{x=bKlKPpGi{AI9)i|)2N_|rJ%)HycZ;)pHtLGi` z{F^^i(s~Qe#aa7$bi)&y8Xp6uqS7Hix=V}JPp(Hx^)HFJ^!9b~mdxQcmu{zggxCo&VuM@wV>k?bcn^gliY)mwkB1EeFCr zOpLR;0aKl6Xfp@weI~Mp?BB~IV*yQF;?-{V7athIdDdz&yDhYi&pgD;T?{XAfi&kd zPRALWFsJ{qw+7< z6Ov0N1!EZ&1%(3VdIEOX-?l+@5pF<|*#4zDzVUk)iq%cx3DC?(+yKdHaVR1C zYfZr~sJT zKVX85r{U}*RO#={2P7MEDPu)`tQ$@bWk^5&MSTbP7pP_mb$v!Ijuvkae;QyF{UnLn$9fnqL822LLvE(kZ=3$CbVccU%{eIfXUO z6#QSzNO@wDzXL~N{k&}N=B)B`?$}@QajPqtilM`vv=hKENb*0wtzwZ$*9nWRj!D>M9SRj2uNDpsUIL z%G`{6paHhj=%G_cE0@ zM*NxW5F!?#xk(?=|LcpJL`bFq_GU`@xSX7Oq$itsz`i))wC~6m!yUL>)nPtfcIv+)lHpu2TQ9?=3-O=Q4^7YqQXeR z<9}l(clQa%ch%5wl%q$)OBY>}XMCy(hZR4O%F~44IJQ|`V*a{<`2Am2)x8pmUE#Qm zLskLb?g2L7JfnS!j++xI`Y_SoR49UJLlLML{n<)j6hm%;`ZJ+MFQVWPP5z6`)$Vxn z=x1zH0mnj36YBRgfO!5j?g#P7^0qXM<2K9WB8SwMsa&!(kkm+za6Jtbv3?8Hf 
zh;w2q2%#X{tFpi+-YtC}E1~$SKMnp&1cWUG?tXnN@MKvxaMq*mKg{(O1{KAefq^9? zYisgUju07^Ng&J1v_ofEAL|WJz8(VS_p5H?8hj|)Va-=hfn&srKr!)H+Bl5Q2M{B2 zNL~BiOaJw0zm&(uv2gMg_i8Vwcli~z;`z$%vdv~sv893Ii8};;t2`I|*nI)wPap{3 z^@5*3Fd9)!8^9B9fc-<{xE=Tac<&Q>co?Mm`Uy3LZ_Tg^4V3b9vYNHaI2i~m92~sG z!|`s_KcD|4)N<;n)^o##s=IL&h-!{o1Lh8?n?k1U5WrDC?z~y|(CmIxm>0AuJb=fA z9JeQcQTQ|VHrcCl7kq0uNE7*M#47clwtm{A!fH+4URm#n9)KMv5<+nQ;^~8#1G;%I zz=zhq&(Ru$k8x=02)$!JCW<4(z-I#e`i{qI52>nZgQbt(0wPEAG%M-MQ4}I6KLu(; z*HLi%A+3+VxsZg3Ab?W7VMG=n^N$wrhEHNC;(}!j%xX$JyiZ@vB244pKIF!j&Qne_ zU->~#mi6dk<2iS>sS`f|GV7dyK8TG<)aB0|PX*W|HAWsiGurqP52BXW29)o^b+usG24<`yn7v3&|eu{sX zh*g;M(yzSt&*Bh3Rhsuc`(y7kU4tLA#-%mj!j!0Cs-b};dO_Pa0jOB?{G1coC!~Jg_mhu($4q~TAJ85U>J}#$^0AXxZed#h? ztkaJWDGecLN&B_4Qip&JC8lZr5zRk_h}C@YEakR>rzxniTHi`l=oMhJd<26h?%Cx* zeh2XBY45eoZ+!w}I?_vgu#6wis4-MagfX`RyS;j_=>Y4VT~ZQv^Vm#8JP9jnWrBSW)#I8lIdjHsndfXe^d+y4)uS$ed+ zW|%RVvzqE4Fb?0Vg2|1i+foHvit#j&7h5&=Mgi4 zBh=gbB4R401+OAL!+Sq0yondsY6$OS(C+u%gK9q$=6O#TB~E{v-30^EBRDAvLsq`uQwh<061O3A)SYaS4V$FH>>F}wUPj_2*y)ziF;En_zY+}Cnb>J{sRT1 z>UwwrT0%>88fXT5*Vh+@1h}Sn7IqInp&JQkw0MkF`!_cKwQe3A7*ZYGmgG=6t;aL7 z>}3+IW%)#U*n~h_pGWB-Qh@`!c@>5r>W|0WC85&koH_wnfk&LtwC%Gp7aJarK5A<_ zD53jDgDE-CTBh*#j>-o~E`mY_va8<l&U+}&Q~zi6mLeo%trrs&pAaA2wlk23jcK)Ky{fhoVajj_4hFP{?Xg7S{d$(C9-z~hSn|@JRv;UgYi=sgCMdHDXn<_j z3B!AQ=O6|wu(Bg?{Evrtk|5&~ATA89NlQK-GccQ>%_+Z#_{`Diqi`Qe=7tP9ShcDclEU-c14G4`Fua83MN?i1^GVZ88ZVop&W34%CbR$oOco?B%_Vl>$UcRU6|^ zff{xai#JOH=Q%!Jd=F9`rSeP-<_)$v{avh6`{P?f)5VVrsjwMh2rFF zi2G|z4&ne4J0AzF{;}WODImWGZ0N@V-bo#biC7!IevbhUw6BmFm>9wRq``{rDsiU$ zhcxbm^w|JufMSq>Ye_`Bvfp7yz^%Xp6k$8*Zx`)>x-V!IQ>)pK z0(#@7R)O+@K+Z{KMrLMqI=adT%HOg^30M$QOM-i6`kvW$TlWb}+mf{>5UmHFsj`(P zM-Cv@U_CX^@)r-c7EpQsWgqV}8zgeVgZ}+Z`E$UNeSz6q&@aB)twCy=b8YR0fQKJn z>q+#mPLFm#I;hOr(0@O5|A`08!vyQDYc{9L)I~R(lJ-mf!%Ic)x+>|PbNq*wmVs*| z&jRhXuE7stBLv6)>M6!F=C*E(F_8+h# zl!uZ{zs?LXgNC6^VX$s^ukw9V4@l4eKB__?&V5?$;Y9#bBqThPC_8CiCsH`2y#Yda zLCB`}#-Q@9U;h$ji0gqY_iV7J;9=a{T%{Ukr5B+lZri$FLDMV1eMblTuJ^|iJX$Y3 
z#>Z##G!mw=qJcZ1SezdKsYfY5hp6_BxA&v`o*s|F3#>ST#fnZ4;Ia4bmplkj4U zVL17%${#+7d-y*y7YmIqa!3~^7Lk045`4d&i97VVGdM{5$L#j~c_J)5+Km}z-pnqH zzw^Sw!sg&&)>rL%FtF}CykJXe#oxB_???OlAWBLvqPpw3HtXX|OIBAuv$2_HF+hjH zudoaJKI%ow71$AnGy6~e&^Irnf_T1GHPLfv^x~P#WYI*GGrj+p&*0$+-cha9JfR%$ zn>Y`jii>$Dqn+jeN4-$@^@LBHjN?1tz(!`2-yWMMcpub6>99Pw)9LQ@#YCg)^4Dh- zxn*UCxO)zr7sp%bV|-W-EwuOcjUSHD$7=2rXnINzkqI^Gu_Z*3BJQ$HdS(s(o4+2; zVETY@tupR2t(-gAkaWA)$WFnFKmV|qlTBT>--3Vj4SLszsNlgE|57fgVNgIgXRrRb ziR>kC6CL&&(R=Rm0C)b(hz{kRgAlo|_E{PZ;BlF;f|Kp%KirKK(Y1lPIiY6VSEycV zG>?Pu+p)WsjYAm5=MzeG&jF~j{F=YV=$=nn&%lO-slnwsV;JIkPj+U;+@(m5x+Gm? zUEf3yL>4DO;6ses6-X4fXGlBEtB$1_!SmqzV zIZ_icr2WNJR00dbv;9Uj^s~t=O_vivJT>{JlYBEkGre$jLlG{k#kcjL45QYB`J*h8 z&k`#{V$pTyedY~|zStVh2Lh(W`hD>i%;6b-0Klel_vh*y`;kHt7krB(y&(?e*o^lD znjcXnUG~o_otJz$20mOW8!Mt$LaU;o)5_qW&bwFGqByLl`IUCg*VyJw`OCg#BdT@i z#I=vzBKzYdJadrF&oVZ<#Yf>kLHznF0L%(6Hd<}jOWhE9dYMl{R+^gJu`r%Ko|&HR zRR9XkeOk5k!V_b^>^|S;tVN9Nq{>O0KIw>wEQi4U;|1<;kotT3w!C77_vl(q^_%8f zoYWwTrnrQ-r}mySNLu=;{VnpDIXO8lmnS(QIdY;o1brZ~jx5CreM7^qYLX9;w}&nN zV6XsQr*?q%n&iJ+u$thP3?Uw=zkE>zXou>b9P59q?O+jN^+g+GLc4fO(yqc2^5TqEiElk8_`MpdrLkT z_dB{`N&bX?&Ktv-Mv1@`CHCc#H%=4sgngZm(QRc5-sP=cXAb`r<>!a|5NV`!bw=&HetrqreepsylswobSZMdH}sr zSa7;KcR9D`%2GRHJE5K(XG)&pxS3N>P|!YpUjXpmW+aako6OoCY7Es6l!kZWIhj~k z>`w#t99^HxL~;I@EYUGCC6Bt;Y6J+(oZ$N#i2R4~;9mhvpo)k#tT;DE+1yDk4l_d; zLSf*OM~Gt}%)1f0vH_LzMtEIkaVcJ6<@Uo!rMcl4KFf>+6Q>Ng6=ix|qff z%0mEvRpZY?Sw;W&^*^R{_hiTu$n31F?mQ_DeSKHg<2=nA zUnb-6NJT`a|C8jvZcJ-xaJiIg{fI^pukY&FtmT7-Ud_QU^U?bq!~acM$Zf!)ZNH-Y zLmmEea#EjptixtXo`a8}$HvA^Rv1rsA#$Ao>S`~uXX^ee09+0`@iGpkk;7Bhy;Ek> zX}OcM)4he;Nu5xi*2Rg&o8vU8FWZijL&%@9-4S(tx={4GoW||E2k2EPh$o9A zf6-;`dYBOU`iH+=kYL8n&Q6{}0XuLX(6OJakvvui$6g&n(%T}{%H8D%NorerP$gut z+s*k_VeRga5Wal0UVT7;X5$T&`G=lTy`Fq{QSz&067KWkBDcArp@VM4hEjc$c`>6j zI2OF;&p&(*Ai;B7Ppfx2bv@{g_!x?)YKH}cBK>i^|?V}iEbD2Or8Ms4-kk} zE^JRLcNyqtyxzOLF^3l=L>3{AxW2w_Jiu-|P;ER`)l1y{fdJZ16@U8{kJW-lQ|9K> z?e?_jX0wRdY*xv_seWx1P(b#xmYv_er7|17e?P=Ai9NsXD{x!-WV~a_iWj|LG$7jb 
z{CNJj1&(IDO+MutyU;}|_f@MBrGfq!+4E&4vh{;_w;T zEtRXGp|An?g)zI$`lrVN_@vm(CJtMw3)2%5ESf8Ug=@GbTw{5U9x@gxwEJXbOhnT=~}CekxR~=nDc_YsH=yg_f;$Yjm1DR=hT3si_$vQ1aJkE z`JL*7Ivv)0qPYXtU0kXG2YDXe>Na1zxRG1k(fqXKImU1U_(O&(} zY+t2gwl22%aqoK|Ds2T);&8iO#Tyq6echz-(NYaFJWrT&Ii2IM+oqkMSkAy#pZc(4 ztZ7_Gla+5T0RPcfej*e#j(X|hJ)O|O2=he-(C*NXCM5xqbxau6tMbGhwT+W>{Sk5D zGtiLBjSoNDipbq-Ai-NGst^(ys_A^tev$QgVaQ(T*3M1WHN~o+j zcc^GMv!}gQeou*d(>2b_%$&7gr(S(?2Y!cR8p?x4)cd2?uw%TS5jE_l^ai*t^DB6n zKj~AcK*4tlas7CfM51bZM4S~$2=#{hFm8a6CFUwPp-fL~m~wsOP4X%hn;Xy<5m9z! z+ZlTvK5xP3%wfM-M7PHn(;pKdK$$Oo_~pXzf*FJ4KD%ZuQz`XnVep zz1I2~a3|E|-c|oz>{cMnA%enS?lwan{`I<$qQ-~J%~1>_bw9E_IG8;xDCo@3_OxS7 zr`MiA{e_grE&+R&{TBm@30&j0FDIOtoGg`Ma)C=9Z@TO(V~b9RQT1YABAqw)!tF+$ z-oidbM9h<|sgb;HYT z18!sWWa@;`n(wI_mRzg3MiVx?RM_uFK6KpOEZMNFM>8VN+=n$(NuUlvwKRx@3jgKt z#6<*IM)>0eftX34o303!*Ehb;Zdin5W19B_M|w+=V~0^7MvQ3KkbF{MJ~rBs65pf< z)<@wp)l8D7x$H-D=S|s^Ve{G>=bIliw=$>I6kjsP%r5eWe1_~B4wLkxl-yXMLX`x4 z8m0tfvwDRSF7z(#B6r@~^?U55x%L@rWY9>*Kej&}Q`~Y)biKV@;=aiU!iJHfGB<|1 zLf;^ML@^9%(q9#usWhKh2=SF9(T{R=J_PCK#;?3uXttrx+L|7CeHe72UhjAeTsoFw z99&3m=$qbRK28>`e3-5`-@HmNylllV%)FqlaywS!%7p!X2EgfjLylXuHy2yBW?3wB zXgAvfHzpn~a1FZfdPHt!P{t=m-6n~GjU@(2whDG1)6Bk&^u;@B4)3@>DNr`sTm|mt zbkid+y$~BRm5g7>z&0+=++94IUE@;hI8x(MSiM{!`=MG#+_Z5m8dbL`Uo;$GNgh=L z0HNa#k+}JQhoA?bzC+x19>;ZFXl-PBy`pq@-1%0HnTQrv3wMUB#3qSOz6R<8{7I9&eF=fFgqLzA^9}& zJlbWazC%o}AqDG+qVjuaQDT zSvirv$d)Yyl)?<4S1v_4*=I0N2bECIsWgU|h~AB@)kYo4?x|0~DqUC%Ib64EUI{ew z7neTupzqH=QqZ4Q?jf;#z74l0bH#zVaEcLavp*I-A9L2QmwMGAzv26%vVO;O+@PKs zVT=FjM-R<}255`ixk6JZTycvCK#Lkba3-ifrJ$&-E%e0DOfW&s#jT7UCv8jHCge6m zk)Z`y?UB}HN4TmIN@A~LrBEYmMpMtdE9(7G1Ffd0b-Ytpr}pGz-qBPUb{xIRVMo~9 zre4wu=*vwXx0UB~Z)Qdj#$J#2>Gxwq62;00x)ohZ2OHjxvb4`7Ff9}#;=Vl}C}a7w zNhW*#-Xfm?S!g=TT-nh#-PPvJwLsl65sfs=Y`!u?8b_VYS{SwPM(B|TG~S@gtjcJF zPPcYYt9@c7ZrPGpoLJI8n8qR*UlePOW32t&nwMet+EVMI^}F(xTqdp z8wAh8m&&pqu{AW+%HF8P-GtaA30p#4??kdg8!u`P*G6+0A9WR0GBHVlk<1hv1@D8h zpudZXH15M)l_&+(=rCTcjH@g;Xb%m<3p%J$tm2CEXI%~~7fB}G$w|qz1RLIa)w43e 
zzWr;C*x~txCEPtJNwlxZ-zGH7v7YQ0dVxU9#tgwB$((VV+GPp~8cIjFy$c3C7wDS^N2lz(TY0ZHdfs zr31WztIF}h?e)U#&1oYIKPO@S(<>-DzYx(u>k(2|+PVQ=NdH z14zz((O+0oqHF}-dP&YwL>i}$vLyHzK+m@7!ZPt;CdV~RHo$eLuYeDXP&l7 zdI9HtZfAbm0(Qokns8Qw`MAe#*L-TmYI5i5c0e;naOND|St&O&jn@gbCmGAaHCTm> zFu|!=1?x5>we@DQV=-&o=Mmf`JG++x@SjSH3_%|La{UcXq=Y8Ps0yA^y&SQ~@mskO zuKY)!tX}geVbzn=cv>JN-aH1vpygc)AeIQu~g7@vy2kkv|@AU>aj%9giu0-ZzWS8A5yv-#Pt7X3Sw1L!4__1cp!x>1nMX{?D zTbU#&&h#A6mWQi-NPWs5oyZg0Ot2wy)^8cuG_)e|+gmhfe{2hLA;V0Nqnp;#+hA z<>moMlm{^jqbKtVDJdSrp{{w6+yo?w#-lh+?^Qm%J?l|#W(ni3`Xz($cQ+7UjP{yk zsCt#5%-K-bpQu!uxehdZJ~8a^EI8g0El~q7BRh9=mml}lVt!CrQdF*4qGD2fYF4>$ zyi{y~Ca=_C24e!NhIKXo89;JyOfp7kWd&BIrZsh$pbfkCE97-6(W*y04p23bPsDk_ zx9lgwI`CgR(5i;sJhIIp#f}?93r;X^n9AjLDv=bKAALN7rsN_r*jiFR<)S&5*tmTe zK-@x}n#o%4>S3#z&R)26#^ee;!h774x&RNf{zP?K6?W>Jxl*F9r=pEcXnv}Wn_n*L z;!e6CAYU~N9>B7OG0}5T;s&BNDmfHfpypCj<>7Q{H)E%I%MGI{NB6bvHMY(!4ofMi1-Q4YY*cKzt)##1PdsyA}9E?CE1(55)lZHzXzZ6N!j z`x-q3df0hoHsMUOG^OXl>_#M5HYmjHK;>rAGH0ekSRZFoW9^FtIaH&o@svn;f<4g` zN4mH10~t+7w8l9aWi@dC=`n5!jr!i$es{pY)PkGpkDyn#q)*HgmN{ZiOuvLi-3f*( z&vON+bOvh9C!oMuitFVL*&WDdOzI_52M z@-N$yuF;CUW*Bn2*B1;kH@7GK(EXcax3`Mq(wOArx)EPQo^8y?V3*#Q+$>A-h4wWn zQI-S@f>6`+qFj5rQ?69q#&4NHWTAv;VZuG1Yk-AHe;Jzz+#J7CP*-TwiCvL{2TV zR;l6n_So*dD9!vN;C!`qa^j+lKtVy_s4d#G?ke=*91+u<0#{jwzE{d|L#LvT5)d=vFp?007`_Zsn8W)|O_Zp1XtvQdu@j?aV>Y;zZrWfSPaOEG?# zJ|mnNUoKTDt41>f?qHw0F(Qn%FGdaU!95BwlEt7lD>V%!#PotYO0p||iT+X_)$`GF z27U3zL`u|0(Hj@jtUhCpc9+d-I>-41zd@gI5RfPI*zGw7<93M_s~IY^gnrNAsAQsk zT+3AG!_%e8LRKE{G=^PQ>+mCFZb8DTbA&B08qaE~tyzfCzKIY_6aNZ!aQ{ zy<>>R4yQUY`M|ZbPn17OORfz#IDiMLq^@_H2(y{4Y>{OPYg(wW=|>kbc&SNG_(v(a zT%^=Wzf1(U)cqCRu=U_?dEReVJ+(18E^Awx24A(MKnulZyeEK0pO|UZ${E%rl-&&S z(M#U}&P{pgL_8hjMe*cts9#+ST3DHXO-|3|GUW9SVKIiRmvaZ5_~u!B-ZnLfu`i94 ziz$Qpg_^kpnCCiP^Z6m*Qdch{pKw$t#;aD&*{SA!4Bj9ipiJgaDE!y@-TPO7^Un&M!s%avU6Nt z=I!abTKXA-~F zRZ9G+beE6uxq^X14QLR)sLYGz7Iaybbb!?eVoPARBtj6Em{`x1LE}5q%0FHasbE4C zK@O_kFjPh2dL@2qeC}~J6cMgfJ*X;Jdw#YQ>gA6>vfybvwXv^cPEs?_>fZlJg<@%` 
zAdZ6!HX5!^^i2A7&l&9_rfo_Iu{1O%8DvV}ltorf3Wf1$Zzc+3Ud1xRm)iNd939n^ z&BJW-lDW!VP_WTb)S23KDRO1Q?nIb&2qr201B|9H&$-|^UQNK_!0+HbVJi)Gr<^G* z5R^-Xp@3(@XdA~aO?^L@%1h?gk}IeHrjy#*VQ+atjn8V4nh>B=&g;}n54B$Nuv1wJP25LEMHI(gDB2*i8q_UVE@CWbCI7&8*okVIBDXg;|o*ZZ%hlN{u$QpeLR6;#2+&|1ybpMoexM&L5u& z3MJQ0jG=iUaGG5uMOs%HEx{1nvGm~J$5?DVeX=kzJH!yvieFJ2HUw&e=yqBh^qO9= zB-1Ck5a1Z~S=Y;*qp)wGx@G7oQDuT;6$}2E&$9|%>H>ePRA<;D2^=<2i0mo?na&K4 zOBo;e%S_ICn-~)|zt{_@U*wUZ)GNTh1cFc*zBnvqID(2`u(Am~w8)egjy)!d{b>oG zmaV^iQkht!0})SV2vOKXQ8k^+m}@4ez_h^=G+U)9!*YLIJ4*U#V*|AHCkoXegpJ*v z%Xl)K=|g=5cJ*Y)WVcF36lE#nc2?%(j92pXb*Lnarl&3a_(~Ako6I@GetLjnK2ey2pGuu|b#+wM*B6Jv{7SFX{rW(u zM2tb-SS0myjr%rEn>=9^$g0{^(3~f-S%1OEJ3Y&xt5`n0%n%wn$e1{-CAE_JJoGZF zaSPkI_{-Yoh_{Ysi+(v+AtJ(EVSDR%u_o|kKVn!sez1aU8JVp&4ALOBfixbld97>{>OoRFRtQ?NmLDHtc86-diFg=o`HnFcm`=h^9%zN7q91LS&R}QQ2P#;<8 z=lcXagGW+ANUhLg2x8x4={d0)3c{2iSSi@Pc!EJZK0ATlhHiRZNa?Q(GEo)w zOJR!8+HtuZ_Ob9bWATDCnwkrf{joMZY5aYRah6x@BVeS;G#y~ai=BGvQernIvyx)@ zhk*7x!TbS!FTQWF*C~os#2~lU9 z6qk`f*jBy$a9W)3@&P2Z-x-DtG*QsuKY!*%Z_r6xLZ(bwx&}t^iqV2yL0%AZ?6_P^ zi1MXsj+HHik6`^2brV%kOqbJ6Pi#EJRBjKSf-@z^(+x7o3rnO%?CDL)3RY%*`b9Pn zVrU!PZYW@J8PuP|F%V0q(-lrc$51Mg8Cz-9Ji^H}t0aKzpu1{wCa@QW0)xI9PF^gb zWk~;CD92s_lKc8-gl@@K`fGd8B<*Wn-Hsn7lbEqpmhljZmK7g#iRGs%7Rn1IV~(~a zih!J{{hVzKhxfST>rM(NRITPSL})Qc@pd2Y>hB*ZSZn*=zP{LVyH$14Q;%xW6xw5%S)C7lGbgXLjmDrpL0anq ziG!CTS)V2^$<^};~ z6zd`%`x6sZ^CY_A>8Y{66N(VUpC>we511U{3!gFqKO5m>`t|}=0m9IOG!ecFCORe# z^exp3!OA{PleSDfvb!RmcsNqYTC0`g=6t|ynmPN+$rj&;U}mj6Za$%w0dgLhBRE}@ z>7v;D3;uLW9%rZqC{(uCI{t7MxT)I7oicT7a3kaJ^v>CGA0f|nMr}zEh5`_Aj_6{b zRzo-`Woep=rKR$RR?r$~<&p7Mqa2e6NNX*+S8ZIrVs_P`Iq#Ry(LD5?nm*}%%ii0N z#=Xjc?jH4n)#y9|lEHmZ=s*srN0{ZMoW?k5fwxI8R>nnFNAA@YpDg<>me$$cTR~Hc zxz`AULxa@zbz`(PzpjekG5?FscB)U97UbJj2hF~Zd6A#2$T*i=m?#Q0(s4m)oIkw1 z9_8yz406SXJC}O9N17_jJR=wMhPN;=AOVHw@*3j2vU5fSW6fyU!|DilSBYsErvt!B z+j>FO)~N7`CRjMBM(|>_NX^t<6KVfact`Qp^MrySp^_-er)Ib_JhQPeqc1j!rC`RA 
zW*~iW^C*iqi5|hL`QbT|Vms4uhzsCY_tH@}r3(aFpt$40^+nKbn+7LrROuNo2m()VcXbX%mRn1@WU+8(y!#rrYT3j`N7E^pU=jzHpaVCUy&gBbu@OLj6xV_J zau;W!Qb6&ELn!CTGQji`*?Llq6g(*pAdz+|MMitJZmO9LuE1&mhMPD2$a)HYs>B z4x=Cg^}{12zb22}#fbz+0?T=)I=+ROcVaj)MQdU0VK09|rp!5LxC7S~GF*!*;agNB zID2`@BDqia9%lu%Z!kmZ2REo%7N)rfJ3fha1(yM3rc_%6@w#h(&)L-SdR1yQgR`Vb zh9(wj0F?tf)Gyhnf*uEHeD#J!QR+b=sD7ided^f|d=d`fvDTL$WN~Ei)vVk;Abs`1 zC4n{7F4o4HvZ`()D?z--8c48QoV=nIw?$Bq7_q0n4kivg37 zXJ?PrNC_ehES!MqCx=_pptf0$`Yy^*+a$zwOBt9*nA_9FTh9K-H;f|LtGXkIK{HB+ zU!yUy5IQjLe9Ghj(rfrd>wsEE^I^r(UtTcI_imUO8yu>YRQY z?|r60?Sl`9T#mB(SxKy%_AxTP=gyRSmfQEXfN`*IFGMChN0^2O=huksPwt?f)8?ye zF}|37+tt`v#A28)PfW_6!=M}vE3ZxB;5))R?y$j`+Vfe8ydquhPlURxFIEM=DWrA9 ztabtS9X%gaI&S$A8-~Wz;vQ#8sP4JS)7*+f%77u~cqIxON%H9}f6X%PU9sA_>AuwZ zQz=Y_MjeB-!5u9)g;S3zQOm%Eyrpk$?&!X-a0x^D#wM<=Z#u-j$W8fC5jpvvlTB!{ zM407Ym%~f5`Gfa?_1I2Xk7=mvUd(*2ej<8S=V8xP2Pqb7<@I#G4MlQN%vch-9;hfN zT0Ox4JK7uz+_5Y+(ZOftB6@>F(u5M^$a}gP$$nNv^A$PZkY#Y(8`YQ)^UHTu<d&rVkuYv1s! z46N`8%=9#|$#<#rRKM8_g8(lFfw($82FtCjbi!u>Se~t+g7+w*hNgY&tqtZHio@C% zP=NZ92(z1+5VM}x6*vGDPX!W0%9sl{jKyWsgj3!N{ypq3np-NFJf>}6nKAtmXsjjM z%89wDU0k?Zp-rsgAoyDDqJ5M_+6pjGp!Sx>Lvgj$q3uMN!_Q67y0?!;`^BX5yViAY%cWG=@|zT^oH>C>b8RS+rzhjPOl|}r65wt z_c1Zv7At9V)y4`}3gjPvM)0Q7=>Tpi+t<#f*&;}?Xn}3@g+E-J% zF8{J?O{4 z3lGRZq3@AF2o!-!XC)3PTKV}wC`y%YUT{JdtLSia*PqT~KkEf)nFxtA!072qtAjb% z_I-!UL_m3RmVg-iiah0l>xpe>oP3CKnSKcanwSe)bYfXN8FsmH*d`)`;hVTK{lx@h zt|1qhv@d+u&x7+F3wzOhPfvC#7gmGyc+njo96i1D_|f`^oqFL*y09-dJmN{6$((7Z zsaFx!oz}I>Ai`}|;tHDljM6p0A!souVsPc2BuH!ZwOT}0JMGsTZ)+E)vIrPWrG+bf zrPCJ5k9f=8h0D^CNvMPwuH8~hh3Wk~-ZN67DIEDmdm5|zMG1P=D;Z4UASIo&N3G11 zt8*nq+i;yq7-X9Y*q@7qMahGdWL0IoP9b4^C9wCXj@D0!$7G)nG>?vtrph4E9D%J9 z48f-^MVfR{ikmAVQ%*T9N&wYJWC8 zX^J4*&0OyU|MAfhX}{`8aPxVz(%AYIBGxFxgr8qps6TJo)OyvAWT5=V#Alg$MVK25 zw0?9toAJm5j?HTMYdI75hTHP?Ko}ct@3$)}dd@cO*9|JujTH7-FkcF~P|wfKMf2O& zBEo8sjh{E;am$JnpE=W8Db}w9X-UTB#7BAi599M7Q(pI?tdhb;BS{N_3w!E8I$ep6 z(S;0<5ZI69>q0s?()UG{N*I7Lvt6Ua;$gHry-KIaW~4OBL?X+0-^#v>lt2ZmYy`8X 
zJp$@Aty#Aj$s=D6i#AOxq-rgd^MS%YWl_g-3xp0Cq$kB1# zXgAyHOZ>n#72C++ZJLs3{cqZ(5s+~FWY^lGs&Bauz1eb=Mf^#x!3dj&2VRSZgllP& zX0inlD#d&Fb-l%1qKzEJ5$qF<_gLez1ERG)D*XipZL9Gp#cZBFea$8@UuPX}@|{&i zQ*bnSGmD)FYwh**o?DklQ7&qepc`e&=PAgP>2+c6^RI*g2OTtDNgGF_O14`7Q)5L{ zUiqZdle<~mBAlc`9&wu=Qv%T2#$h?NT54X zFCo&DEQgEe75J+c|LfiwUHj<|2L^UI*zON z{{uOALXedjU9W+2yens|dWa?Bmr21nLI0#0kKG-L<=EuS{8M6JBvSM9O?qSLR#sL% zeE7hM#WY+ zwp$%gu$jZ`<9s@29~c-oL{_NTsL>6Y{v@aNFL{R%p30i|W8QqvnqC^! zHs0RQoeC^llmh)H`D)eSvveFYK*W9qWGd#+uSoF9sLZ_a8vS<8eqk>3W+g_`6{vul z%v)UeB2Dn!_3EILbSB>f-}OuzAkQRwDL_6z92}5iNvZ3g${Hj0k0&%Mt8|BeGvk3H zl3(j znCs6xU_>_Wm(9-Y_U7m47RemEOH1j~rKW+0Q?NX!g6P&Sl2+{K|;r>JhPFY7_T(f($cwQ}U zZ>ye69*DHX&XD=1tcNw-g_3~#0ur*|Ys>3TrhAg_JO>9LZqnPwgFqBF#qDy?jv@K_ zz}sn<&3YApzgyJV@A)l#PWl;5gC0pMhk;8EzLZFQJvHGM)~*xEx#|o!Sr|bjCWGs} z1)zCHW5uU2Gzj)b@kAkA@oZPLRd@NzL(6e>*W$GlH$0-m_tV8+dIknqYB@i8e>Xb# z-cHRF!<7ssfvt^s+5nWMQCnd8Nn6E(G_N-mId-cEo=J?L3xk#M8Xp zE!)CFS!?qs#pOgf`+cj$U@y`K8S);+9J-7i*?-E_ix}88JpE<*XPkc+9!MB1`Hr_< z4y8%o0!Whmy$WD}y{BNuln4~^h6&-lXD2`6F-WxR7|oG|Z6fx*bHbM1pqgiW;&*9R zkNE*;C2@$(^d2B-p8?rIyOot-_70*|lKVQ6Va((bDZTORdmuWTbzvZN4U8>DjA0s# zFKm(+Z4ks(A>brG$F4X(Ttk^>S<9tnhl=HN0?l!*#_C+0?u91I4iPt8Z01$ryBtZM zyth%|aY>jO)5|U-?+c4G;#xizenfLZyb^YEvfBfdZd(J)Aoco;b z^7$^Ga}!b><32!vf_M4+%|}Dg>vem}G=^8Fq82VY#9eYOxN~FJ=Uw#Z3n1Rs0;okN zJs0}I;2vjShUm;{&n8P6&Bs((nu=c3=IK{7x%TX{T&FKb%8(qG3@;wDZsN{Nff%aGp7}c?i&8}BsnnYpHy`8DG$*x?e!G3V#s%N9#) zROL#mux^59WpP*oyXQ3<6~-dV^csIL&1wKpgvgZo9N-jo#^xSH;wy&4-4DN>i`RHA z;N(wuGq%k{D_kU&E6{iiS%pdi^+<0Zs~qZ2+90l22SW#p?8OfZfbOntFj2?W);6zW zjS@U(H=;b@mRLnFo2R;An^?87GFm|KBYb`2Eu?^W zBuZd|P)Rk`JNEgFyHJ~&M`2blQP(l4t)Ymk>~;Cs9Y37!DZCc&YyY)&%%_sDJ_;PPdE3V*sx>KwU~c+>_+fYOxl-kP*T~@$}}3GIc1 zp^y^lpM~SDMsibe>ibj|{f~wG<=kaE0S^jQtMlC4fh0WnT)O(WCfV zEdxJtblomi%WgOx0?45*yCvV%x~1R~TjG7>N=BY*Pw$3yL74%y>|}$;{F}l ztOxHI+I<+Am^@Ti5($;NK<^!3xm|SKPBh$)5WVv_%&LJW;<)C>$pMk>nd2eFr6`K; zQN!-GfYxNl_`g7Zh-m?QlbF|n9*xK0?wo~t6GI;6D;WSEUhXLX`Rj`>l)rmX3V@RZ 
zWutuKfnojELs_k50AR8Vyk_H|wr<2RJ@sH%Y}`-Csv-EvXU%}*syp>SpRhwZfM(a) z!t53SjD#`)I8GN8HGa$tn7BG`vjmNR|FW7xmY$SojdvbK)-7o2tCcu86arq?P7_gSAk0QxTLP0ur2 z#C-&6`*Q$E>-ib;0=GPk_tv;HpukQ*jkq)_&Dgmo@|kP_e%@J4m`;4TYFgvo7Qm>C zWD#>|f-@x5Whyri0TNjU(4O#Mxpt*4Z(({3cz)llVf#|BLTQCVcNU9F)g+LDL0R${ zSJYdbHUu~gfYrMTAsDvLu6ic;F!pTw0L!aNeKYH^O!ZxCccrUEulFM@!n*fUw$;byl1SBuG4r>7spy3fFT+I80dP-J$Mw*$ac<)n`{(;8oBjzvDz~7gWlwW z?Y#wMb)$c+sAYICjZl-yA3gkVNM{-J8CXLn>j40()@L1(_H?+bkA&<=aJ@&T^xPH8 zz?0|Mq=qzVyNFk=TyaZGJ1_8MLpPuL!J{WP1pN5xcBU>~lQ5(v=)d{IADwJQn)~Jt zK;@$X`TXk1z|iwod&=##rD$kqj)bY+j1$wuF%FPLQ<%f2+D<^@n@t1Pjg+=d}b(H4M2f%qY1e*YlM|3 z0*X`rIF@SOtlS5w3Fb$Cg8{W)#Ysg=+4C;vv8Fz2>+5$iL6(VT5cksC8UPX>HKSt{ zbxIoUf1fX@jqS4x-cEF|ta?p-;X;=BS5|zDmx92Iu=A`VtxL08fVcYa_;feMBrV1I zl|!fW_6DU6ooA?%U>FyS@8MzA#HeF_nyHH4UDXXQ-=TtGJiv`47i331zwUnDY!+xB z0(faVNh~(Yzyc>Ef*!g2y+S804>gF6Upo8e+rTrbFGF?-D$TQCJNo)QD@(Nsd6~~_ zYDe<55J$1cpAZ-Kq3}V+w}FtkeDqP!N5^!2bhd69>LR!i9#S8>k^%&u|GEGW(X;pvuF z>*299r*5;^ek&RAv_a8wy5A{BdUF0F8AL~?#BK*Wp~PG|Bu;;D`*Qc?tYDf`Zyuk) zA9HLiX9G2iP^!ZTF$)HHe>U3Sq{fv`M&)PXS=Pf2NSo_#8G4K_+TQvd-Nv8NxbqL_ zjPbpQ^_+3Iwkq_P+4B2u(rJt4dgUKuE%%DEUgM^(zRhT}D`0DYSPA;dPeVHHzvFWL z*H<&h`}SXhP7~GDr)M&Rm}ERYdB20_3w`7g^VQt^uXZJsqEas|Z$2yr% z#~UB}d)AE{Zi+Jk1J`zsft5FM2RGQ;P$aNDcj@n2`y%9e^EAVDdbMTyd26hX|KNWf zMUh^`au@}1|7z2TF4<^XNqLAG&YOw6hFig*oWoTdd-*Za5*YoUs_yr;jrlolV-b~u zqWH|km)SL9Z7S!U#ya{C(T*O2E^j=gJ>|AVH<3Gs*@G+a)eOadiRy7U2?(zSdt}2$ zLK89nLb@=_qlKGn@wh|^qi@cD^fZm*4RD`iib1Di=C7IeR=zkye0$g#^)t#MJ;2sv z9_QmR`vtXuXJ|8ELYPoxikj`wO4v?V;h`7QiwW5#ow6Ac6y7NA5pEt^h2AY`7`+8K znFPNrz++rqw%Vc8scqujG8Fb5Qebe^s0Kp30UZ;X#%HNK8IiuY48y)xux1b;yb4Ct z)=+~jT8+S+AR&C&yR7&XCjMmde#0gv?LMjCsYOUq%*>tRS(e`t@h6D*%&vndLwL?( zL>+OFU#6-baea=A`^al5LvH%+3M1H-J;UWN`V0s^_33WNK0}tFn}1xg@ZNy0A+M*x zXaxR&ud;CK(YL%f&#InyvN(~@t)JDALH-$6x(iDp?Qed6{UWGY6H*wFad|>t%^a^Z z4_~ArkM#u-H<5QfzFEui8I1hm)f)SxcD{1Eh2=)f5Ocy*2-Hh_E73Sf7S{ZV-cB3l z`c%h?g(NRJ_db)ns4m_cobNPA2}`-_)wAEwvAa2MzEoTy{^s!iNhAhdp)1&GN5#|F zgY7(iwb5$)}R7mzD*J5D;{l# 
z9;891u>^P09}Dt7D)fJtRF3(qo}>W2!YEgHu`4jGl8;PUg!5ej0FCM4x9ybH3U?c3 zYOqLnPe?CY<6O^mjHa3^>YlX{V)|qvg3X^0W@x-wZMP-FM?O|COR--yv&%68yfVz3F)U8WJ23#h z&JJw8-BJJm8nm}1_M;P&XqS5S=mhaWuzN6D*()iqD0xa+0&^I_zn=(W7UUDOIjqf{ z^FpqD=%%GB*0QnFZ^uSIu?h6 zHQp2yXXaED<@GEJQ;!M%u}d%c4@6wnJF}6W@eBg6`Tks4{LJ ze|>BZv}|b3=$J5=rysrooV(L_7|(z-F7q5_8b4@RLPOGgS-a+$G?k5h;HG+1rtO?@ z-VYJ+i68m6Z@PsW)Xw_g%6q1=-j%Jrv{y>VZL`BpV3tF-otVP>9#2jmw5r3Y#|8F( zhOcmFCJT2y!QFEQ3dw@h!9e2DSp;HqP3|rxNvW6_oR_;0pVr!tsMfO)-_M_JiXa=s$Vu&9;yLIFcuOmBfL4B6(q#su&@yivPvHMc-Et!3 zrrDS@d~Kb=Uy(9g9nBw?pbxmBp!2dX9y59z_8> z2;X|9elQ7}kLL?DBm6$7S0vU34qA991798}G8O~=&K)cw)!NXg_LmkV1|-dl@p zX~?I2SzX=x2Sqg40Kt+%tHYfckIs?QXG-;qBXNBGo*LvybK<&yzwPWVtrV}oVrrN9 z&AoF8d3!0co@*7DwAr3Jem_SKIR$`}Ma*SS?v~RWG zKL|>B3Hkb53#D#oM0}1k*4jSXq@KatP2Qh)c>n1F;zrbg()5D)L^XMdxYglq&Xwro z71^=U^K*N7Y_a)%hQ8ANskO`+-Up=Y_QS%d?T$__Cp2@J6!jmpc+4REzKni$idktk zYV@O{P3)Hzl!=%N+N;fvRv{omW_LGcKb?<->N7oK!O!kYGtp z;tSLTX51A4^m?3XybtS(k(Ti#0&0mnsx1J%f_K%fl)mYRp_{G$v}uBTHlrkDgw zJ7xG)ID0-nQzEX{x|qdhFl7~a2*u`Ml0L^vy?|6|9zEe(7U(9snh^O%p#b=;VQn#R z6qXHdi@+m6z8Vn3!8XY^a>2c;Ma_R)UmFd@nLmO+^g&O60~OVpyhd^y7k!7T+?loz zEmlXuO4@q>PQQ&~@M_p@8V?41$#&691>v@KV#m=x8TbL~ zT!Y~{34s9{1ul5Y8yr8L{bhC^SU_IwxjUDb_RZXj7rM}~r>_#Waphtn)elAdR RX$s(@s-&g(>49a?e*o6!&`|&Y diff --git a/docs/img/chart_form.png b/docs/img/chart_form.png deleted file mode 100644 index f73daf51d9bfcaf67ada686ee7d7c6ee67a346c7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 203224 zcmeFZby!s0`!9?L5`vV{(w##H46Ss7v~-I!1H#ZK5)vcL&?O-qBHhgZDo9944Ba3d zXY+jHJnD1af8Y1L&d&?En7!B9Ywdg8eShu=Q&*M4#U{r_K|#S)ke7anf^ugQ1qIy= z6BW2pg;gAef`aqJR!T};K}w27-POt3*4_#QMIN_A*F#IQkEFSbA@E%gVQ4_y=MCq)_;jU{^~9Iz}wQdwd?EHz0dF~s#so*R~Cf7s^lWJFSuBK zvyd*^ryvxDxvZwBvdHI$finR@m8U^# zM}?{z8sc2oKq0sFb6meg@BhrOzMjqEBqYJSRW3A|M$3l3OnbqH(nrw$;0;Q(b*BM> zB~G)J$jk_xFmzWA+olloun=P=;+B13r9uSWtxWG*1r=zhGQ=p%_xCR7%!v0p2xvDM zh!m_a%N|ddun^EhL2&0apwJP5F=>2zzN=;C4JSRCUeY60d`sNLK)r&jOky_9cPrS| 
zg~nGID#oRRNF{BA`alc|-QpKb5~PFX(BX6JDtvL8fh7F!@_X%a@3lH_*J@`+C_hQ6 zbRr=R$}~j9n;)lp{G7jC7DY%C9($iZv;-YHNbAV6yc)#jg3rvJ*MoP~Q)N|0RPZzK z;s=Cbp@7zqiwjdpQuJ$pjg4YQ4!$hp%??)rLmo|p+rm$ouS-~%5PV0jUWH74b-&X4 zN>5mft#Z`6HIq3B=HP{x`R39BXzXPuwDJD!hAis7O4NIfZ(=&-J98?eBjhQ`gF$Cp zJPI62pLV`T*-)F>$6w@q_&-IKGT#&6XrTl0d zZM{&~2#V^*dcX3pQxY=>l{&7#y49`sj;)=Ck)RjFDi+2q7Mi=AKZJU5BIPxH_Nu3# zbnPAuw!ut}bA8}hFc<*eQ>h{x^P?V{e|DTzFK_cCovwE?$p zH>7{SGvljCQ3o6D|56Cu89|EhAUQT@jmpDzQrC~QfInaLn^`UOZT#!Wh8Wd z>ZD!ZaETF8qhwqUP)ns*4-(O3C$;J;amhD;mpspM)pLjI2u-Sw?yYSPYgNY@tC2+;CgnTiELM?%VwDMLe@_ zxDEa213Xy2-!9mSQ`5hN$$=6jdgn*;X}f--bjzq>fJNdPMVT)F4;93ow@OpwaghdK z=>>4U?L!xBB|)Xd>3+Z#c6<<=)GfxLQHc4qYUZ??)Q{DNMqW5~@j zqy~mNla+QntQHXdL3Y?mvDyKO2&~}YvJQZo>m=P?eg!qV(jz5)-DOdzC!Fx655y>d zVGt2-hMbVKr_hPK40_n6YQ9n-uqkkave9mAPVJ0pGhbiJ;}{8RYucp|C0Far|Do?G zZXChgmbkdF$L7Or9G=n1y)eG_@`7wu2_@_CE+$`l^e)|#P(8ZHUWWUfqS5i>4#B(y?^GWbU+9je z7yK-=D>NuPFX;IYK5A1~raqT7o9U;}B-I2f9IQ4w;~|1iBHkc=B76}ehz5i%IXU?< z`7Alnqg?X4{9|^MpM~=GBp1b$Lac_d%~Y&e^#}AD^dgI=igoP;rjj4_s#=g#CRPfoJT}N|}$; zik@zSj;WLwJCxQ`y)1s2e#E;a_I>4}h&!q9n6Q~JT^ghCl)LJh=mMePT}MpaUL(Xz zo7mvi=l2`mS7q0%Hxkya3Z_0x<>cqAerHkMpE>j+o!0iblh7CE;CbSX zk>DA6ce%5u`%xSZq0UQAvbE7o8jWAlEjf4d#_LGE`DBE`@!JR`nmA0_9^;B`WbbR(3n%YgUjV(jwE7Z zVmPrKM>2;OhYDwp!B9nU#l@iFO#gtQ`NyI4;%^3a6dU(9oQgM!MH~(tvhCkGys+Q? 
zUg>b#{=lKgzHIW#R^z7Yrofa@P0!ZM7UNO;Q6i-?1|CLXhjmDeMzzLlj%CgV*%>!W z@EtHQcuYu~bdcg9pVybw`Y*#hdQl1#RWA^eJQ-=zX|8E6g=SpFT&PwWd+XXx--kDa ze}4ZJnan=*XxhEmu4&`0AckPY6!9m6ZNuZe?YUaPI{*FMLv_#1gVp`&L*?V?CFqFK zIQx(td>rhQ>`{K~vy(Hs<16Ze?R)R+0=co8ve?k`2~s?$w>R<7g8CGGj&)IU`tkhW za^$k**a;7G7avUvT@b^e}_c?Aq?nuFcX)w)==Nm{n`B_G{yprk^ zeC%9%Lf5WkJ!wDJRY22P6)f@1`Nj6A#Rz{sR^CI!{P+2}I||n@`aOkK&LU6P>XO_NrAmSvmxgR?P3p^9IvPhVaOcT4JE<)O*GAX-2xJMnsE)n9f(feu z)Iz)U!wm8}vI_~*fteUJ1h?e54Q_lK8A~s7E}E#1s#C2i_4P%Zo#7@>=s2F#YEjz87N*cX*lmVTH}oinb!+>2UzXyd;hCVu6 zhudk4a~WqiH*Lv&jWiHsp0jUZtp5fTIX()Xd+WE+Q1ydr%W+Mt#QVW<%iOUBO8%Fp8gbnL)&4#`a7VFa3I-Q9q+w?p?5ZzZJ$iX`@< zQq(fjMd0eZy~3cXP1Ja}igT+0^;QfqN+&<6{o`-gX2f1kdv-b{TaAoD*v$)`zFS+* zMkQ4WjAGh3J2xS_4I)PoXFWSV7dSrl{~kEB{^=I^0mimh+#~mp3#u7(lxL1~C$N19 zi80&!%WO^^AgI7{me+@%pb#_O{J*8}lwlVI zFd79#)KeJv)4|HkjKwjrlFw`b+xn>ekv{d@9DrNF}mk&ZqCBo+#VhtTpoN}POdiGJYX=G8^p`a%gYIj z;DkUO-OM~W9U=7pddPpCBW(q-aJ6-IvvqQ$xp}Ucxs$t_7#-csLjV2uuYFp1+WzmA z93lUHE#L*YZ|-pOaDll0`)pvU=*?AObz4s>dp&7e2P;Piu!cB~pdhd4Ulabg^}m<= z+f@C(P2~d%{C(=*ZvA_zDEG}P{Oy(gMX$fE0t6Gs7Uljg@x`%8z^cY5D3T}&(vP(~ zZ*9&%8yzh!Pp*f-_vR@rf*}ajXdL(k(UUy&EIKUOTyd7do(eY=7_~I5 zW-vpadWf=|(DLM-_uiH7T*gMjSksR0j`xmlhWFh0$Lk5$hVjO;@%qvg_Y(J=pR?o7 z><}qV&8O5?ESUkQ*#Gcrh7D9>z;4}iUtRJR`k%i7KcV563*Y0$k^UbZTS^QHo%}Xqf&Awux6S6^MKb$QUCNJ zz_ryEw_Yb1&4}r!{__CfMeqFccK+u@|9!y!bE1-g|6gR?ZiTlO^7HQ{Z4ON9W zHVfz#y0O<9;|qxaj;h8-SO?d$O5(4kEWZ6t?bUmMpQy%`VK+}|eTS)>A`t27`49|i zKjhk{rTvKiyY9pXgbZ|ZV!_6(4ZR_=qlCXbQMLlux5o6TfDtf8t`(frvG}6!H{G72 z+e)*%y5((9`WuP=`q&x@ECQyh?^FDZB>yrHATNNJQM1jTkiR|kUmqhd0X<5eol>0p zD{=n4(*LbXn@+Df)N|@)8lcbm91IR{HEg~iF%HF!qL%Qx{5ei7@{;8m={#=aG2_zn zkZ(w6(XOoNX!f|-qw9e#rEA1Wy7zXB5*OSon2ZIQ2eace(A6;eT~na7Gu-Q90|ifosNx8I$h05H@b??T~Q2VRG%&OCJFf zKU+O+xxSo0e~kF1YvNm4w@luyqZjwbAx2OIW~EL&@*1@-MD1Xc6m?mK+YA{gCxK{3 zq~epT67(wzPM1@-N2HPpq-A;{$oPWZ82rOi*nnN;d}weeM#V)-I+-*r4FA)(29^>4 ze!wFmGnqf{-5BZ2=z!r)``iUyblJ*QA~n&ylSSBp@xyfAzLM2OlIOF~6$d{x;bX6s 
ztFsL}vBS@RDw}w*UAJQFv)9!Vxp8nInI1yzmxO2}3AK-9Qh809{F4_4$n1S><{1xuVw-? zd?dIOmN0iddp!2cuEdZK>!>|tWZ?K-m#gB{*r(>7^LYNXF>|m#rKt4>dSjZ;Zg4_o z>mdnEjpwL_+*aL6Ixc{B#?TAY-8npM20h%k*=i4>fj09XcKoCsct zsz=9DPA$I2GlFxe>2~n*x*jU;)R*6F>pFw}I8GLj7NChk6u-LOPfABcqXUe?VdWk6 z(%OX&MWxTUXqBOTjEQkd@@i?qA1iY&JWu9B<~-(nD}sbrskxgDzw$~kUM^B!pLn#K z8(C#h|E7$9a|?WoP1;GxkeR{`xp}yIh z2bF$mVn4O=2GF8=R0Jk1qrl60&$HenIo3^>haefUFQGiwMou#WS1fxV}xAVb@eQ6dfP0HiB_FXlP>yOK0}wlmp!6eY{Tn_0_pWH>0-=+w)Sowx%ts zrD{W~XMg&s&!qtGm*Tl&wxl5T?pPXyQ{&!pij(#B(rY~N39ko&v!089L9RetZ+o<) zcz-?6mOylJjK_#yZDsVgU!3gtUpEkrA+(wP6t_SDz|*e@TxP;{OKY`&iPa1WO!50G zshPxcQ74l@t<&7cnt4A}UI5IQEa32r9Xy!x=V4USnc`PHU_4;?{0&ta=X#^%swl#A z2-JbI0Wk7cCGMutr%GEb*H=M8Df~aqHp=F_HcBQL4nXm|`DV-QygV)ooxrI#7{;vW z8bjYxJsAA>|GZPJ%sFWE_1Sm}lUjPJ`@#dU^UW%}KmP0#JD%>~ z;~$`5Lbn!37=AQv=)+-R*9_>}1lSh@;-8*Be`6gfKO_{ks$=iU_sdcH#^oyu_N19!F)^QkI)QI5jf`eZg`8-I1o_V zJ1LI!wkGEje&<_t5Ycg0iW4XQ^Qng~C(Je{DyAoW5j;KoEs3H6f5}S9X)uN3$>E5a z|BA>QtDwIB-m(!Cr0PG)$DNb{a$+JROfbp@Dwrx5=QnF zU6tbhM3%O@fS5OCF|vTf(5=&<%YYGpeKr&CiV+OqBXOOyN^H73nqq%s=W25n?R2fR z&xadJ!%Co>!2N03Ra%KJ3JSb5AzR7P%3@;k;rrUl#^!#;KUJfoH>$r604c7lkCVCiTXu<1l$%sX~Rnp`H-t?=+ZUpyA6qd z~z_GN|jX23;7Fu{KmhSkwU745m%NciFN4#MqOycS!R!tmEO4-o0kSBCEcf}rsUi-^fN1={h>yd#+pdN6l=bqr|B0kB!k zq;(3v?bB(QSrFgdeATr6+x$=${L?!BRNtTR5G~FYWP-u-9YDpsZp@m}`1Vsk=T|LZ zs&~UT>}7A*Gh0^)0Y$jQLS4(Ue=^3+N}pMQou$RR zl=wf~P5?aPnu%%oUrF+xm4t70rulVS?w@ae2dFhkoI>$?5cIFWMlv7R*-ORmmw!m= zUnGz;0F3OEIfVWXckQp>Yc&qonW{nq^*`T!t^|%xt>IS0-%s`jxQ~6595R1D&NLuHGpXQk z48%WTT%alYsxR$@n)yebA`a<)41E9$)pt711BP;f#tk7Za;JR5q6dJ9ZwB;$*o+QE z{%?DiT)83G(04J`Ov&N92;q(5yBy^xgQ=oWUKd2VQyK0bTjxWyNM#=-dwvYWg*kFR zn$Nc_c$v1W%$G$E5K@+q=~|bZ{L>n(-%c-(mIUDPrCj0yU;`IDH__?Eh`8C>Kk&^6V9l5Re>&TA`fy^+;M`}o4dZ$@L}D6n8HXjVeMZfIV*(Dk zwV%E0q7Bc_d2?co(#ZE3+pBG>`^erMEbpwIJwECx+mlWhcHet@W12-h+ z6wu5obAS^#X_MjG^mB|NYQONh%S_d13{G3kv`-w*g1g_zC z`pmx}Q5BjGlB0s=nqLC$57Fe(3XrH_^pV8RiplDr4*Mbs_{%}ug?+$Pv0`s#N(f+9 zyS78;@AYg0VNH4{KEW$}+xPLoHDJJ1mGIvSH?f5CGml97M*-2@jlY@Dc=X+FkiK2I 
zGEW))B;BKF!>D8a-2;lK&Uc6O71p0a6FC8M%s0qC;yij#n4eB}CEq8V03N$je>WMr zS<&-#t&Z0ARnluwOGiMk3%&ua?6kQ$?@{j#V}hRfZq6G`^_o!3AH#k}Re+?D8 zvS~m^B|ulwj)B)p5q5u3&YJ?;Z3CRztBmW@j1hwR4uZFUOUpw(CE7hMAfGVz63f?t zG`c}=K>AtUmq9?Sz)#EkFYTm3%qq#9hX{|x#fRVq&Vx8CI(`3(MQZ(h_^?l2m%L8h zshe6p#QPF+*~#x9<;JTYBr);XYr|M3f3-B>SQZn@tj0y3q%TiA<(z-XrCdKY(5F&) z?{fRS!kyORS??Vn#@Za!R6}w|^TT7m8bSuUn3SVuftXCMS4)Knc5tvFaSiRgCi|T0 z=uoVfs#EjudrobAV@FoXD4OF6Ofw@LR(XoTy&$WplUcui&l z;S`NH;dCmH?C;mc9U+6-ip6{@PmOMP7bu4xd*-p#>J0>TuM=5g8@6gFE~$?7VTV$| z(6Sw$a~nYUSu-&?L_jnSWg?8zk0j&3$#WJhA-gQwJn(tDAr@-a4h*}#ziY71u{Zlq z2jo?ntz(ZU13^Qfo8g~`F&+EuyO*jXH9G#cT zbqQ#(qn_3a;oQx=%3?Z$K$;Jc#>V%KNlQ`djh(9>5e||kyz%li5>PKwukLc`v6fdl zoJJSsbpF`q2GTNq$1>K&Ew9=Yo6vrG|~G>nH9V_rmGyp-#!Ow7CjSiya{2%7jOQ1M;gh7o7_zHOoJ|tM0Pn zT*^pbcFYlS8BACn@m{g3A9ur6-~!0*Jp5+uf#y@FpbueF4g{aW{36Jrz^GNqvxpA0 zh^9Bh&kBsU5DHj0cajxoBm8;CF_f(xdA{Vr*g}-`O*h=RKJ~lZ`%;U(`*uWuvSmeZbVOpMCu% zm`ZH9U{o6}wjV%W2c+RVTCmMpxxkkeQteh~MF*^m(O&VuuHfDQLMrW4N}ff&~J zm!m|sg*J+-oO_?h?>2tn_L;vM|6H8$hx5;Gy0I*LWmQAMJATIv1B{$`tSdm4=IUlk zrvu{V_uP^1`s_=_!Gf$rLE~SF4)0$#W%l*OoE9$Kgr=U4)wviR+qiVPY$o0niM7t% zbB^#V(XoP2pR{5KeWrV-GB_o4!#KL@pe5>%_0{mVk3{_|-|}2{ar;#e_A}V~ez(p4 zhF5*uxFY5wx8ySTxD6bb1EUZB(&ZsdsV*u@^E&BbAkyF#8wPpKYL3%&xkJ7C{JVjA z_^j&;a{0E3s4R$K~pC+#nC3uT8y7O@i;gXrn&%&w(a-dzIY;^KZ-g@||p38EswR=PTwU%D;&nTa!h$Vpmkh^+q77(2!ce|-RZM6^xNh7X-yR+ z_BHhfiV@dWJJ(l0qKwSteMA<~VDZut{q5Jx3?RX(<~d$I0_6`zb0pC0<<0Q0K1;i# zkT+mz_>2)`@R8Mgbl%oY!m@cU(g~!`k)ZC1L|>@;wljOx2fMA<8L`|fP)N$rX!#>RFkL_YxRYh!-Ac=2 z9vko#Q8zqxS2(&~w8K&(HFd+q`9s@D-QicIu*IGFjZ&AltL?Po?*`ud-2`Py;$gb6 zgz78uOufID02$T!Cn(8TXUM27Kj3{3>~V$POAqn_vK$quMe46aaBshYX9%GZ_0Vy2 zZLL$-mp}F;*VTvP+_TO$R5}BS=-isD@LcGO`#3>_DHch!hU-$}!drIUSAClj)~zml z+(i9cK#m9VF`Xy2_8jkJE1ht^=HMuWY*-yV(->+@Q4#kp2V{ zKPVGo|NBPLwG1S5KvTOLBy+i@=HHLW#F_06*Oy)PJVKRu2zbvx&CdzVd`RN@!Y5G3 z0yczH)mtdR?3^g#FX@6EqQ}k@D2%4s+Tq|maYC@<29d^EnM=BtjtA1rC@`nR4PX`W zF_YkX*LL*#G7y{|~Y@ z%Nil=1(}rye(MuimdIOpwpOxcaU&NvBi*znWyTl@)xL*30l=dgm7)l`LBkeEme8D} 
zjs&Wef~*HVa?Yp`W_@yZmVg9zKkcx3*bb&nnrlC+a`v#e6_(4^eN%=5Q*3cwH9q>J zy`OE-y|w00H?jxx_2k#}0HY;x^8Ok*9wteKyh<{3Sf(sp9syCD5$cDpJ{Z4Nc1P^q zGUMRl9u_+`tobl~OoGIBU+A9iRcn275kl#T`53?Sx{lP5C*e!EB9cQ!k*WN_@xzse zJ%(CD@83Gqc_;Sixa>6o31}DhK|@ulcRkPY-qa9u7VCzo%g;Iuhz)g~(BZkC_sWU4 z^NZDIRz7efm}cn5-$`P+eTqd4dLP%2qOU~dSXOGUcTI}4Y>6XtW|26Gw>FU1Pw{Ye z0s<(j#uRlUX?)XZAfBpCNkWV!uq*-2^yBgf7#dwgiPr1orQ#{Tv45oyO{Vo7C`=J$ z1Q1xzzl`QCWwjf~!K(EKFSxnUbuHsG1fNGbn90TPQ*2lCP)S!R^v{lXzm1{bN($>-RdWl(r%r=DIT5Ql~enah8hEM zXqB2Q<#fLF3Dyoi49$hy_ADN}mfg+By}-jT8jhwq&9V;d_JMfC`fH3{vrCPu4+}|* zdoA9bGQ4|w%e#b6VfmrXRp1^}L0q?~cj3wB2EU{&0JpUw&GrFFH$l|=lj!lR z>L$%sB=L5e5!l}b7Ul5=jqe!K-xi@0 zct&;KN`o_xFE>6cm;yNTpM<1XU|L5&Cl6v;FEI!WwD;?r{?NUnc8_7-`NvHL?4si} zRhg)9&C@472&ov1EFE6UibgiJD((GIiLV6lq0A547`A)iO&JtN-oaUk7+$?L^SDG( z-uHXBg4b7zsFRfYI1C=kb@Dmu_u?QUOl7I88{`<&!N!RxK%eeK>}vrumGi zSlx)2WmVBFb6Ds$8R}p5Y6$xOl06NiB3k{dtXz7r2!$Ftxb85ReY^$cy&H_>2E}u? zR{|@0FNJygK9E_;b}dmFO7(^BB-3*ARnfBXrA90SNL)W5RwAWF(%O?iw8u_ z{iY`G9hDguU#_vd#d3Vlb{9%)gLwBIetYZCRSxYbRGC>Rlm@i+@u>kZA^h!Wyy#MC zBT?#!OfYjls)7xFRb}J6T;6{Cd_-@DDpq1=xFcq|fEHbM8>OUIZrr?{DJsd1L|>?0 zM_R(AytC;3XS%#_9RY-#nd=CcwPQ#i;jZgEXA(rz2>!}1y(~le?tQXWz zGafI^bBdwGkY?8@yxr?mrV8kFQB=a(fQ%ANEigj_)Ic8G+tW6F)S9RLS?$V_vZxQ} zDZ3X5po^N@)y7Okwf^wiS(KubIygUom|MVioubAyxUZwGORBjUaQBtv_Gn<@uYT?( zxCJe*=`WjFqX;HTB7}$V0>U|fyinfuS(H60m3^UNVu0> zzB@d-6GNNm`{K#3Vk#DR51j|=ErDT;TVU?@*?HoqTVry>1< zS9v6tl1>ErW|F~xq9|E;`EB`KtL{cHAyVuMvUy#%7y13x`>+`NU{j*f3apc{PgYUWQcG9b^6uA*eguU{d6R+L33ZSae;CeXVu4;yn6)9 zGCwgSP9DdqEcopr)ZxvE0}7oHA6yk;`9CoW8V+#uVwzhc>eC~$?h;5Prb-8e#Yh{h zgHt&At`^#SE)L^kW5E5q!eiqT^zwjxw%%}e6v~OE(LiGXKgAUImZGeeN?(y`AXf~B zyW!aKNV1C-ysgN$HNIM;dcrA1=qmml!DSV6Dk5+gN*CORM13u0n^>K;%TNi2EL;GtPEeP0g}1nc(dIB zjAtunRJJk^rA%phwwOK3KrBDRhYN*kUy%gHfDd-xsr!+>kGBySJsu$Rf-FV~GKG@a z4KP!b2br#-lNjP7`DU|Ya0kq2OMdtslaX(*8`L$%|K(iQh4TU-kxOh8JNswdWU1hu z^cq(>#uwDrimX##bt{nju44iNYyEy|5tB1$dSuK13H$FP>-?t>@|WTQ!Xn6HWjZ^8 zZ|34Ocvb8>r6mD`4|~Z38~x!QmBj`ync9RLQ7naVVi#+xlJwy{)c!v2gHD8cl~#kT 
zDnZ=l=l!j&T#ir_^Uj!s^?HTnwlX?N1YRuuBzIUay3Vryp8R8T;U!B)^4PEixD^gQI&?~(h8uL>B6rfXlPAVpE4Q|~{3vIKhc?j}&xvZRu@?6P$CFPHeE#ol_& z(4^)TwkrFcgf2-2cF@~97VaQsbFa6bDLyPC{-ckjj%lM-5I<8RB4o|B@mY{o)i9o;n^$55%T7t$lLP|k{w7I zP`n6zQtCRC7iHzv;mN%0l_!Ih{XpVf-!>)!J$pS<_PeY>;xDcwQ1+!YaAH3qOTVNG{;KEaYJ>wrFm=nh?`tZKGE zqZNy;%qw~IW=3#=rD>?Te3RD_t#UWsttf;(Yb>nzI{;gNdYOvC0gsdV;s-{Rho_Wq z*4LaU;kVEB@KMrHZYo6I?%G-@1UK5UHuS>kI2!`u&f=;bL$-bpp{3O3*FL=`GB_^v z)BSn}D78`<5ZIh{3pQ)$I7vo#rphaQCJR(lIgvjo#OpwzJLBgCC6*gI=pZROyuebm zmOy;5lQ%FMSB}@E8Z*K}J$&G0)K2XCHFgI%pml71{Phzi#u>D~W(1HoW=i+%9P#fHh(C z*ksu*Ld8ww`xa-z-T7g^Ya5pjVx9%twd`ikdGD7#-@=-jC%Krmqtfif(l}?qyg0O% z5${OhitB6mYsKdd*P)3_(|X^iYK1D?;C1 zQhhBUZq|^7tJ|Csb2QSQgPxmOLFiHpFI}^YO&)-4IS$r#>z1!clRc@{WH3pmkK{kVK+n)<3rw@fYtMb3jiBZ-0O^kyAb<(0Ii&CuU^5 zSR#T{XQm*4;Cou>h)415L&iZoo$@yJ)f^vUh6LsIh!c5VDb#SlNm}?}rZM0U3Z$XD+&hvav7+%6%0@3vG<3h2;kY#mGZar6hgI=v9QIoUY?p9&Euoa8Dg9SgFmi zq)c%O$&qhH2YYz6^K^&CI`5z?dE%X%rQ7^Fy9pMOUyI;Pw4du}m`LC$_LT|+F4}!l z-4J4fr> zh#F^KKOn~~$C%RCDrnH@J6A16A%ph8M@Egq1W32gw;fgkWl}!ASegj;e%5>dW<^9E zN=n2?4Mf)Q`hlWPK?(RWFFcE!^vT7vGtT13JkOzd7;!B0_crNnlL2mE*!q6-Y5sjx zfjl|f=iHpudA!VNZcit}a_dTj5_UfiH=BPXgD~qEZilOaw`&*86_9OF@DiV98*$gT zFht7*l6zoJ@(d^^MV2fO+dr}1BPaUc0n~XCvYsXYpbso$%=FAV<~v#|XR}%7t8xL}__(Qrp{rXUgYFUz$huOBfqGKo3 zPc^Q^wb%%XjNhBUsCN%zOn7b_DEg9=A)bi7shgJi`ek-O68_9bsZuDPQPD5;(+KIn zs5@7+Lkyd}cFEBTPj`FiVGK*1d{?!5u>)G!ghz4GQ!A=!`xqW#ijN zKtl#K?QN!7tMa6EEBw039%6~HeB7E^S{Ih|8ZzllgSMr|o_l)Cl$*{ zBX+@Da#&tkNqI+hS*`qTr5!@A^5|I^qyrnD`K_uAXBPC1gc3^vM)`jC zdOs7tKaknObqIpavaH}c9@wDq%I_D1H3YCv9sR5*JK;U0Xx^%Ss@g3Xy|PE(M&|Tt zp}31Sp$GkPU~s7Mt_X9f4))pxP$2a&k@?vmYB%LHl-1uFh=7M&EINgb)qwFhk(TJGLQ`c^l`VSht2cMBSKPX=5)o#jBapLi(3aS=1Y%R4(ZF{ znMISo_*1nJ?5J(j(2nai82*avJPeSOdz~ar=9!JWNqod{Zr-2+4u&OKwtL+dnI%s9 z^oDdAF+)oh=xM=z>nWg*k!z94u>K8%SNdZvR-5ietZekb(~l89AUzmSQFh^Tk4ipzZx2~FGYhA?sq4P)s6~q0{mBOTr&{HE28rK@R8d$~HZc#A_zr zE_<&`-tEMo9~(v~g&j?#Hd{w=UZc?Tw~VNY5H#aZ3Pd@Xs^WZsr3MVm$lfW zWunnb+Bp4;>W{s2;k!EcrC%W(@=Mz%^5D6PWx)dtFEDGw@d~d?j6&KHtB2ebfEx%e 
zd}T$uG*9nzXiaD1OM>DDq%Mb7Y-R8G@pbHJW}Mxh(PCxe!!dbPi>EzEVP7bDXE`ag zD`+hEIbviQs4X32#qP@Dri}Va7Lis5VX~)yio%Z#t3H%QgUKRqO0dzzioON3P7#v; zFaa`!=*Z>!o;0JPp--OnWE$>lmAT?DqCq=q}6`d~6=m9Mu zSe?hxi+k8_FmP}lKLsCsIZ1J=QqnqYguofuF2B^wHX!)@IlZiIa|ydh)ODQC*+@p- zP9=-t>IYg~+t6R(!hyzwq_MGH!gFlav3vYdc{DtqqWVM(!wJjns0@5A_Im2PEY0sp zkQL`HPhTPIA8n^EJ1nO;$+LS~zEGR?Mc9ikz;3_IMd{+_DlMo>iP1UsmSgb6))JME za>gPkZ7%&*wLBV0UPZnYjH5!hxgAO$_2ZtMz|s)1u@R=iRr>?gTBmZ?VD3z3 zI$$!lfLND@8wL(T6JB$ZMb#MUS2f_spR?D1RuU9OFh4*ECsfI+80>lR-g&uu9|!Ff z&GgwI%OF0^@$0I$u<7@+PJ&i&%15FXakoQI$&>~|h{P!5$R;P;yI)sU4>16T;}ySZ z7lpc8H;`nJHCGYDCUN3S^VwOzHcjv>?IL1Dk(*EC1c0mLdx06&wM)3eq>@-f7ekUQ zNt0>Y0;OR1&6LV5H#IW0$x|RZ8Q3h|-4;c9JAD?RNi@K$mX6T*Ibw_RV5pk^Z7H+V zhLK6(orj(JiBUNz6Yd8Zg~w7_^{dznkLE5Ht>w8oIQt zt)U<$tb5)-E-7<6rRPJXPw7OR;|g=)VG2!X0uj~u;I!_Yc1X}SS3)azq&j_8^)hV$ zum1tRS#jHV%_-N}v#D{+O?H2-{H-q+Cusv=u-#Z{17(l{pN->D1C~Wu-!8p!2j_Z; zbRZT5G6&^+##XEYP(^d8E{`%*{inR|_4k$QHX))o`=pYXV-rLe*#tk!nI)vy)JIB* ze=1=RetBQ_XllGibb@c60<3SG;YL<;M_B9Z`=fz18bA5Y)Hx=w{x{&2q1t~s3~OYa zlwrnWby+BU>3URl z12H#ER9QTY-;4fY%(W9(Ew^{L-l-w7lt)r~uKl-gI*@=30GI~7G$heZ(Kl`MW}cOS z0NC|CElsMmB-oFou=jh}=|!dWZv~|xBm#Jr_ExhdBp=K3i`pGFMS05Ospk6v+}!Xi zK>A7JHZJGT#;r6gU+_%)3MZ%rx{kg>Cl$bB;vcgany|-`&NU;L95U^8%On5q^)OKO zP1nxq_C`X#|6vX_(x_E5Kc>%68XBr)J(%R%H{F!f(#@8RrI#_20B_*rVc5~%u94MV z@8F|`l4)7ZuU_>2SYmCOe3`vw_3p^ zPOjrQe+j3j3!Hr=2yM#kGrg;FY!K%*0I6bdCWELB=$_$sMqqVCn58dGUE<9<;C*0* zEgZ23SH(+}@(pdv{5?nKb%Tg^I?O5gwdnir+-#-|diRn2fPK<0=$ron3=V?;d|vvh zEXA({pMOL3BzJ(uhRvDgk5+%D^Z!i#AA>qcJ*bfj*fdr1d?QJqI! 
z|MOh{Nz$O3E;^w11?UNQ7n(t3n?CrvGBfqUB<)P4THn?L=4%Bon|2#50S#?*ufQER zd=U@N$IeGWEsmatJ0$#m8fic?%OsZ7F`(gcqyq#`KR<0x1Ym9y1PDxX@-|AwCMUk% zHXE!4ppthdpH-q_M+o1tHvhW(OIvJ?BmRWx-_<_=>NmUTn87I^E(8|Q{sEvDv(^B> z?0`Wm=z6DVDd$1Q%U^wbpnKL5)k&;)@Gm@kAT0sVs!WdD6?Ub1vu3m;_2dQZ?;4lJ z6o16*M(l8!fKFY!rb5PFy&CKY^P7Id+5?ArU<@n(0t)H*_Vl+mlPv?({gq+l2LQHOk8Ag1vjRQolrAKO&TVZN)YG|Pojk_?S{(RkQlQdx&u<0b%}=@#^sV78 zi`_S<@ELstJD|bg7k&cz%y(bz6O3Ewc%t0}8nU-U=1x@)FqsA$0LYI3&_j0vctR8W z_ThJ~1sG6@8DNrC4o-!@JXA!R=*4cwjqK;uU0uiDbV~w2u2Z1z>Ql_5=r2qDwF5~f zpqpTG>*}Ms5rZ=TO#*uA^sU}l16}k0c>1{UV5kA;NC4U;Cydl3F4`{HEK`)y^Ao)Z5N(5m4Lq(Mmx>??r6 zRXJkX+cqO?(%bjN?>)cuk6S;*dPs5Wh0Kzi447O!l?7o*Q@M|7#A7mH8#pyR+G_>? zmmuj-rF|a+;74wUn_MiUdCZdC1;Fh;Z{T*QZ#rc?fNT^8bnwymQq)hquKPn}zFT(V zCjP$mAA$bDhaS@|H{W?OTBxc2MD*|mu|2bTQ@pYaozvO`dIyXF0Db?P+?%$+r@SRB ziJHAY7wRbheoB_+0D2S8>+Xtwt=_)rRs?XB_e3_!^(ukJHh(0$39AyQSdt zpc5DJ4S+582LWF>bbo+%xfX!#4+H(ZWDX3XXTTR1H50a3N-b*;K4=0!SXIh|0A`)( zBb!qA$NT%QfwId@UpKgoPcYZ9VW-(!7Njo@^w_REBl-uv+W>Tv!6)8)R0nMQHxUgH z7T$DWLHEzu!o92EkKlIH-kUtxffSClW?b{zn>dEzyREm22eosl{Z8gjdLxEhzsZU3 zM>e1DqgOcaW!ta?f-S zXsqsEs!N^TYXY)C|BJo1jH+^L+lIG*2$IqzU4pbU2-4jpAxNVjskrFw?rsSYkdRQi zTT;4VkrInW;+t#V&%NE-?KhrZ-x%LJ#{0*y93t16*PQdr<2cU5;?l;8twMJ7$8b{Q zIuk4tAiLlnK`mnzUt6Ce@v!$8J)=`BBZC+<|4j_#Ap+XNW)o3I-2e5(=mTJ%gUPb* z8d;BiMldiTbfZYOZoozi^2}IMTnqtlGvJkWl*|fY9|C@5kUwCx7}6#Y_BePXJxM+n zIWAJRXM=czA-MvF>G<}Nu54rJojAb8GjJjbx}UElo_9wq6Q|_$fFkocoD~U(FNT1H z49Lhy!MF^a^cv8LKGwd0zIISFl%@hOU+S6C@=lU9rqSQ*X()V8{Wwz*zN6WZzMlU> z=|O+H^|e!XZ`FWoEd4#?IU`?;3S>|%6$xUAVaZUJ^@dX%0c}At;O#?tTq?-=Tc|CO z@T_&^fH7``>a{dL9`;kn7oNaVJ~V9rNIYpJ^;lj9X0~6GHYr1vu~J{m1#n~dSEIy( znza&ssQ%kh6GeI1GEiVACgWSGdlf~O*gowcyQd6w!1)mNrSe=+BW_)(Gfe3*Yk51) z!7gP22(;eS*QL5PFC1o^j-6fu{&uB)_xJg=E|-f)+nce(t{8w^I{|g};tzAr1Mhc+ zAJVF;9_0ShoP#@vuG5^Hgh=m*cpI6iS*Qz6d#S_ckcP@byX1UoLV&>wuZwCnt5-`) zqN(v%hlv2NqcdQBy3s1s0s9@r^%$np@5V_l|Mr0j#M31~1@i7KokPl$t^&JkWtu37 
z9=98|MdaB097y2;=U0;Gamz$d*+{{>u-kWMO#UgsW7Qm#2bUC=>yZSbtQ{ywWp&3_PRq?Xi;_m=<;uPy&q?F|VimfIrYYVbtj`uj;HpMs~YUN{P$ zA13+%LjAoD_lC`%tAWl@>vMzh5f0sz*5p(h5GN8VPpv>-fJ{$j!r= zrUhvP?#=*yRK{99={4{srHSGtaD4cML*j2ZgyPhT-&e(3EvfES(S8IDCKGUS;93h! zby&c4^?qM3?*XnHjxvo;b!>tN)x|6T>V!vKh=m!n&r@em&xMIMH8Gjl>XmII9o$v{ zLi9xMG>j&4P6)jFm6r69M8gE5-)RxB0y=G7`E;FBr0Te4H(1J87ZIE2qzWV? zm}o-BW9lH3BKn%R%1&#^XTL0w<88)X-gdm{?5uGNm>C}6`aSc!di_k`k=00E64LZu zKrTZYJbW~P+~8d4NJU9Uda?Yy$OpgsMtn%unMFEn<%oS6qTY>2Nj2TdhA+R^om<=9 zI6d$95c4n7bX4tC6MOpRW?JCi=LL91sd|u;+*|PH`7_D*^$+iPxKP^6)ywVwW-0$L zKtu=dq{EGH`j6lKZWldxZS1tH%0xcbHcSWGLV&du~{p+s{Lxv}^*@-s5 z%Q?vyfp8oe0Ry2Q6i$i-5r<%vw)3yj8=__K#NwE|+8uD@pLm0UgmM~R^nIlap=3Vu zJEuFPzwYpcnK#EE{?^*=CmZlT>~GC)IRXes1GvLG5FiYc0-gs~`~W%B>w4;4m46&M zaJ}Z?vOR+CR`P$UGyq;?IS|Qkny#{%Q>Xj4s#iB4kMleH7)$}GUfO8Cvaa8})_(en zfS`9N;Au}C8<^hyx4Q#hg=h|;ani!+w5de<*ZTthqJP%{`_18$s(}{)=5DH={j#b* z|N6f_8eBN%n;Dm1clggY{27SRm`^xI;BId z8)jiIMH~Q19usVJt1&?7CAY&p+VRu4mMg#$?KS>*#RI2R!zqUZHQ#lrt;gWpe2`H@ z5_9F7e0f!oTR0BL&v1Pxcv!9I@cBg?+AIKaodU-MTos%gU2Y=XC&C4nuL7-#tsaI< zebZ=3zMd*hWR(lRmg0TAH3pjK_P}3W92JNNbK{`E})!qR~^lWjFwC9f;_kB>bor{5TOltb_vc8d#4CppREAM z7)HZXPxXW{X!*Sjtv1jrC)l>-`RhG6;h=))G9m^<9T2RRAe7H&8+}{Ta}{6d$E6fC7a?69(XX_j&i;6$0`yLu*~pF&p7p zVR+xGxB0uH>p)#m8kKAesK6fBJgAf6QtU7f&g`2<9I zYGVx};-}{Uz58BUI`B#$8q#_(%ncain8>dsa#^|(c0P&D4- z><;;4Y5cc$K~AtCfnABw2*g6dQ4Fm4Ug`5{#h`Ju21%F~SrAKE=awVRt%527JSCjs zJ{aw#Gye8oa2R=7?GQ^Xt4F10rMNBEIf{kk5?aE03D?|#^)fp~f)b{BzO+gXuP%pA z^Gxa1qO|!{Ri;@&aga5@?R(~BWS;k1a8^vc`Ls<6{6F;Usho`X8RqwVtc538pxvs{aPy@ zBvCJGjGC@aQi1V3@1C+^H+$nTGjA!mKKB_C>3+QVp>hxqfNpA1JlRYsA2dY=)|Zpf zyU8A4aIex~6*gNN(IZzuV*BwX2NqhV<`o`U$-9s5hev%-rr&aP4*r`fS40xH*KT@x z{G)|IAqjh27ZADD0ppoo(+N&(AA;PX*XvM%3$fw0&-mNB1+2Wr^K&-+{ZjD>p8A)}-18#sie z^?i+FZ-3P52_L8_$#fDMi3Xl{DRo~}GrCmw*kbC?3pq?bpUjn8cep57Ex0CQ<6z2R zJL{#yvhaczzIH-;15mkFX)4Kh;?rT#G;z??<_N?Dtuf{9r{cO3ADJJpq$qor=U&Dv z>+2G;mv46yoNrPJ;8m$na%y;}<6=TiI-biYp0P%t59p=%YcTOA-Agi4L+Mb1Bp6Lz 
z>Lr;w_!-Y%6j~Z*yx!ndwE`KuN286qpQ$(P2HIzCmuh|T%|cl+`iGmv#vitv$I4lz zrmsh-w^nR_n$deK>{q@?SFnS`gPZy*b?7SAGGO@_1J$z!gfSif8mI(vQIEvFx*kAU z{xPd2&!Wj{9dq4Aacu|mD;&(!pi7*4fboxH`vm+4`VFavn*}0Q4?H~+i&4sUfN|&W zW=`aNnEVO!CoYx=o078ofx=tL-JwoIETJgN9UvXS#fsBi-Hd={CmHwdp34uBoAsOh z=!vr(P)1yhWUlIuLZ!w8Ebgz&+@>t*{cuI3H2l-16`?>4k8qY~4Lbd5X$O!*8L1{Y!n3XK3UqdyZ zflZoHZJ`;SiNy}75jcSQdKXFcQjkFl>{ zXktAtp!6-d?9B~8AB+V09+v7J5Fzv2UZguH1%?SbwJQusJDV+9OO^6{N#9x`jY*;Ypf& zZY7CUSbqr$`n7T>h`fcF&3~qwMt*mX_>;?Br`yaF=)xrDYMs{Err|M-nJ}sy87P`w za_h6w4p@_vrchUCJ=HvyDc8TB4yu0feX14qJx8evmjO+i026k;TbLX1zUxkMg2QUG zbi$g-QUIMgrFJ!34}_+B= zD*+=LS3vG|os!7}()8QyM}$?xh`p57Ps`~yle{GZfDDwwp6Oaj+FG1)9G7q2Q#@Cp zz4}9H^f)wP-FC%_Tu1%`8rD{Lt0IoJrS`ov`;=cl5^wNXl=+z%l^J!QTwN3^%}_Ir zak>Hti*|IrmL~|mWZ5ariJuD5=}y2E2-(#^f#$FUDoDU-VHu5Yj~CrCRrl^Bs_ z013=v)vjzfpF0v;4X>|=EpisfhL&?W^^A4~*K~-dB4cj&%^HWs6lc_eqCWo`bm~KY z^DLxMdTS-*)DT`vhWJ70>))kZtV-jw33HlEdVRBdj#za3=P7N=yoDPr7@!rG z#&Bmafh_g7((GzG#uH~hT_@N`O%s{HH_9MtFjn>*|9LcP=p#?W)mE7*J_KgHgYpjb zmti4br32f!Ij;|0%r11*Y|p#~YmSW_{WP?OE6>9{fl86)HwP8F_hJ4AZ0jx^Ix{(< zyO#T&x*A)X(tk7JwCo7x#sTz>0^fkf>vag-b^Ku@$}7$$&QjCbRj7{;C)aTd#I9eD zmRcO|0V{2iokW;55cGLDA{^%i9IX^`juj|1V#dnVzGwga+zr*_^ttdt(PJk)eky!bj=B;Q)~B<9aAOY7)aLr_#ks^BqZ=o*4ol7>ef z^id}LN3>-Wd(|f%&;^c78f+P^*l1%D7rYrY8RTcE_9N80PSA^3MxAX{F~Qm2FT8S;&w~xTI>Tr9F?db|vyeJUL784G-5xAu(iyfnt$ao=P)n#AkJW5_D2R zS(5MoU+zQCuY-xT7%Z?VJa&t_DLBD?I zm-Bb&_m*6!^kVwM+&*AM+&-*%Nz*XqR`;r;+nl5tsV%ADp;aVu7RY z5}Jd0yuc3QzSp-Rv#Mi6JG39vVW;o=&5qLny8gyMsm>nHtA|%r;re@Xci40BD=yXQ z=u|tPt|+NAoOZs_{2aAF|1+Oby^RyyN*;`^BdwQ=-F@6|ZJ>#j-x=O}Bj%&w;=Z#F zmy2~GSufimglH|E?fvl5wnrnd~papseW z1un1hg4mu&P2)Wv)y7 zJmoC+9L{riNzU5s44SJ+Pdks1J)b*LF!RG%@&~)c^0htmclReOVS7H5s}t{@Yfy`U%+WR=XSy3z;ZCO>DI{7!rnzIcZU zBrSVs9k&lvPC)fmDlCiTC2G1|y0Y@UIm$7~!#ca=PK6B}c`6PYo@tqQ6fsHBWNZD) zsKC~i;QMQP&%e8zblU_R3obhBvCuOH{=w?Z9Ggic-5+W}(q6r0X*xS%rtsMme^{1e zaQtyYGH!TX4qTYZSO^yjuKE$&H3t&T)pc{n94ZM?t`j*%p*viPlzz^W&jasm=IXGb zZ5n!jWY|esgUu7SErgO@JX);wWKa|ROeWEneWE&%Xy_v?4qnS_2v=LA9u(C;^$s!> 
z4TeA~*}Z-<8J)nAcMBMMMqq{)@*+IzB0=p6hhcGM)rlLRV}^oq&YRt-x< z*LjGujuLVC$dczUw5L*YadFT@HzjXIZOS~#oa@XLK{bz$4<^Exaftr<&h=dtJ%!_m z)@rtnF@b)X`nyKzq}BF>nFbSFR+qknim1(U$Z+e9*p|4eKAF@t`kaGs<}k08y$uBw z*N9)hN)F+jQZ?hYGvN;gSev{+GUkI@-GX;MM8!yg-9(&q3+5BP%lsn(;;7FkRn5*O zeP0zi8zp3FwBaL4!r?s-fZ%i{d)iX5(JSUc@EFVvVx|WlSgy ze#%po^Vo^zE2SqdMM79%T4S#-iM+)Ln(d>6ZHs)%TQQEhAc8Y!l{K5Qq(SSW8yJlv zCYD?GCdXW3ohEWPR#fv{KQu?;I%%yM-Jp?@pYNFZh~M6B!f#KI!LFkfgGe^6)4G^f z=)iJK#!cK}{iA)RH`{=m} z#k-rQW7EBbL}8HF&)kONmu{ppJY-bv_C63LLxTyA*Sg%n%#cV zp~kA=-6{h+-f20`z8DeNt6~dtGQI49HS7|4AL3ia*eIRDg#C?SXsMG44whE$_{Has z+vzL3`!SYSVV9F{d`=QN64og77bM?>>vwd}PF`Cd5YSt#Jhb#5Zcz5N^&N$?@i*@xmhuWx2zQrwcMipDPa>U=D zFK!fCeq@tESQ0vsJoAbcui>%Q?#H#jmwr^K&t-lu*>MQAhyXQlK!@kIsNnkHzzpqYE4 zee#1&Q!4;E(ImkHKQQ*Ef8^F&N5<;tG&&bQY&v#gruo=0(mNcJTE)pU&GWUPD#`N7 zZN}I;-nzb>EhC|Afh+9rOfXQIfE^<_Ei9_{=CR|U_g(byOoFGk{)T6uQ8Ge8;yAS0 zRkS--=ljOlx0vON`68zB+Uur2O+BGkaL0iq*d;93-%n5veRn@mxRMXTP8}-+F}yYG zU9lfv#aeZ7SNu`GpvA@dTnS#gSM$JV{$FZZLZlG@(~`Ha(4oAD#{Ez_L$ok>EM z8qHHW-=S$|_`OA8vLAgU;+H^a7Kx+<8(`mwa9v7%M0lWhHw@ybe@81nUiJFp8+XHo zwC^uCT6Tu(QOc~vjtUlbLx*Td&1XNVp}3SIJ@Og$BT3FCsI)?{vfm`qj@0no_@hQc zo1Rru?ja|kv`vtApyh0D{PmdoK$+iy>%ZmU3=#)4GcFcYh6^BBH zHT|z)I4fM_C_08Sy)nQ{kfA)aCKWHOK*fU~hZ6Q$EX5<;sG`*L!p@Qy6f(Z_Dz^ze z%tOj|wZM*jM$@ltKInk#vTU<9L_UPd|8hhww3l9EB(0Y(@?Pjty#zXM4tu;%d}O;D zX+U|eHE=0wyVEiZi=tl~L=DwS+zg;pUx(!wKh6_eWAL9*nuGmq-2RsM=kD4zkVID= zAAT2=kGQ-%4H`6#I>hMUGF`rD7doa^deTq4x9w_|gSg&pW7Si~48Nk!z;daX`H*Sn{7SyuCoMl~DGdJaJASrd$dq3ws*Nq+Ju zf^QyaH0hBg92f8&Y2HF+vOipWL7hoe*Vt>-SRdX-b=TUYdz_c55jS=$MLXt`;uh+? 
zTL&KUg%*@bEi#OWr-CkAj_;%|gsX~7w;5Ke_2S0+kkq@e_!j?|?-wVl1c)+H9D;&^ z3crA0%&`7|A2#mKXAJnW*ow3oS@lF#^=u7oYuZ=D3aV zNvq^Dog(9Wv@|X6FHN$Zor&J&8>*;kx%{oY?lEk;dI75vq~M5EDCiZ`iWa1OL2Wi0$7Aus*uYt;$4ARgxA^p1M5`G@@u#~G~b%>EA!D+4YY#jF+nyRJ)V!CR@8T_qY++E5H#((qYb}_K8{Ce=ALYn=7Onx68HdV)e2y;N=g4$@!%r+j183#f3lG zQ;6BaZjpP-$&}-K=*MP3I7VbTX|3+h2<5Nt4}-=f`7&@JQNG{dXjxHVDV{zVPBcq+ z7jMNhb%6@WVqtRD$WQJ_#yZW)ZFx@q$Tx(Gr;_uWO(U6q^mT%MJbYtkwOxa&!S(y{ zvLj@#oi7Ap%FyNDPK0Gn|N2|?!o4K^8TqP_k-;R3D?R>&lVb}}f)Q<>z0kK{HnFbV zP6$pP58<9$DMnN&IHcsI{+x1?H+`OQ_)EvO7xiTMg zAurW1p(;;xp;GWNCa2SWd?|T7Ugq(s5fV$+pBXZV4S5^NY&&KZ1F_V;ceSJz73yYc z`M3qmWAb)(8mO(EfeItdfIqeOM^FSSSt>oGccC`ZPa3~fmluY{vwXjQJVb1nMOXog zcBnHvd9hL`<9?638(u5D-^}tW&i7rFr6juz3;s_twLW{)vg!XQu@M%Z{WRe4s0Aha?jWtg(Gw9B? zRz#yi+f8J{56q~%_dlv{5ajhm9SBrt<*<ZZ&!c0x+EP5z? zR%!VI&7L@DwYs)Ib{8Bo-tVUDq6&|WPF$92X{QUk-clc@`7wt}GWBYU&_Uf<<8Z59 zrkn|ni?UKnnLP8|>3e8!_zu$hvW0%B6jjxHWeT&7*L{I+6Jzji%33~@t({Am3~a;d zzJDq(gNENUmYinZGH8W)UR`p^I%e$OJoUTYyunOjtz7;xis7AbRUnyIWBs#x?Co)n zh?a`}<~Z&1OAhPgS8`#iz1*mqm;xJ=*Gz+ka&Uk$$~27Ax=Rhci22E72_HVq)GFJF zwe*$-U&FGAKs3Tp8dE<@x90}$?TYtf zB2_s|YNXdVF*!LYPi6odG5$T>O-R#8<&$+ebJmqIWxPcvCRyAqdn*{KvM)3Dqz?s0;l!Nt@ZK*tsR!5rE47L>&cBHq^b+$&V| zdR7XbgPJ+*gyL8koG%N23%k~2a_CY(tPN>M){2KIMD*Z8PJ8E6Wxv;bx=cu& z+nt}to+=(=1}c6}Dyy-0yAh|s@RqxD!AnwML@i2Ly)_j6=k)ncZ*Q_1?3(upVS$l8 zeE!h|k#Ev8=PkEp5}R<=-uJquT=>t9-R~$Xy3DCJs{g!mE7=o~F)r9szFXPKE!A~% zo44|ybL`<%L1YCKfBXCD4Ww-gNS`v+ch;vJu=yOnuw1)F_WbnCTCK02^u}GUGCh1} zRj(2Nr{I-aJm`e=#ky>xg#S}_G|kuIUIKcDnFRj%7e7IeMqVg*clI%(%(-XKTU+p= z4xE3)DhU(~y$)(Zc_(~1UYO&nc4?L|`yDjq2iq{!Wbz{wN^)N6EbY}8R3|i%t21|FPOBo(Uq$Qx>e42V+>?wtTb{%eRL72W)f_#`gf29=lKO+B8Q%a- z%!%7lP)Z5Qo@qeYqc3LX<7>BuW5$BA`H z3BHE5v%mgmU>r>M)^sf)@icEX)4YH+P1UQmWIvz#TGwH-`-+&}ush~VhQXLsf+NOW zw-K;F^0q;@puyjqec0kF#mkx|%KWFhr>l1rLh8mZ^6(5T!fWF!cVm ztvT_>KH*CW^|;MZX<}_FQ$%Ku^}HCjP@sd-!WVi&1HD-|oZ!Yho{(B;((PlhPKK;B z*`kH_FGRIw!=&@)6mAHcJ+1RRQ`SThmNTh)*QQ2=y)!NgZqL0{=3m205U2+rgTtH7 
z;wd5LpWG94F#c!+7k3KWMDEw!IcUwGht+53b*}tEOkCi0`+^Ld=zo^_;{5qO#YKNX zvcs*IXhcjtuczx9RNV_k!B0jF)6Rb2$H#pwnHS%0dELWt9tj0i3tG)SQGVz_f7|^O zIpWU>S`(eaV?@_JPXZt4%J2ae#r8-wEI4dV3DJ+=U(@ic%o9Q4nq0|(l{K-#03k~U z;fp?^>qcUG>(H3!k3cL6C8&(>B&R>wEi(asVpO!gP+X?r!{}G`c#DlPVu+dTR0B!L zgW*LNqh01V)|bN!H6a&0btCuR1r_9&+TLF-Dyg1+QFN;B@_O@YD}ta?j6Y*XkbdYn zh8hyOgIsvB_0gg)R{jd@Rm74zXv~iMP+oepFZAgnv{>z)98QeGF-ktf6urb|)qGU1 z-Bof4@Y*BZMvujp&nT27zk$X-I@X1i(59{!tR3glFnMyA!){$y_p^+0^J(@qJ>O?B zNe#_!wT`WeHL18`niExpEioUqYva7PJ=E|r{wuYPsV=g|1N#lohNMbL3C3L+-(}`l)rZz*Y2(e!CS~%vTR^hng4MTd9$kMg))sHLQoIt| zJ->gT7u}e%x&5_w?@|P%iAi<xf6_6S(xjW(paMp*Ubv?&h%I6C zV8z#*izr@y^)c+T4|0B7y=~(pp2cuu)l$#w=5K|5zS#^(+*+zN{L9B=J!H2N?{D?N zU&}8LKFG|zobzbI;8n;?p6prRI`8?qY@63Eex3p2!PUa4&AaKO%L^r!RgIx4rVVhT z=K?*Pw5GbqjHhQ?~NrJPA=`6qLO7yLbF}%j!siT?p5yaj5s751FhAV zdFD;s%iAP6Uz!-c>(`QTRQbjHuflz&?+FW6XsCY8lER?O!ywD>J%dhf4`EmH% ztewtJr6`gre0`7hJopN>p^litWi8pCD93Y*uYOkurWy25V7lCkGo6FcG zoJ8sRWQs{iHQ^9K>87B^lK?R~GU!q!*D+_$m~fhn^Nq)AcyQb31r)?pvLX;=_+FBmVpA~H+eDwK>T@3@p=!DO^ z6QfBLUOFBCX^eWFj76X=fv;RLZxDs6Dt_St`%-!-9~?36&t~osKM8^e+e*Xlot{*q z8RYZ#UR~1HgBHn13017!veGEBp@7R;Y|*$cOI&0m%UiA$-Eq=OCMAQIbDP+F+$F<7 z6pmDvxbZ|rGMkecIxTQlnPXKLOs^&VGR4y8eoXF=>g+Kcdwk;gp>>oFnTO?jEI1f9 z@Y0r5LV0c$@ov0ZZYp^z9Nc-4K3`=LRPkT4HJbmJ1I)Vr0g15h-@eWqD>6*cE9=uk z5Bz|le`Y)7_7P_e@jO+zPMr!e1=v-fF|DVq5x<*`&=!EPVmo+=|1Rr^D_rJj!@B#m z`qx?%w7YAgT_|@@bxbs@(^@^1M<+G`R>Ylh=rxC}Y2*FlO2=45o+Xj16-NBdH+Z2K z0^hy^;nyYNA5&RBXuVOk_16Voz8Wog4Ql3L_G|WN`iYPAf9y4R&Wd@sIF;uS4Frj& zkw*smvAS61BMgs)q;4(cmKE}oJZ%{6M#Gj{dw(}j>lwtX0-DsveVrpIR0BZr@DA0i z?R&q38z5~>WyI8!LuPzC1N7-4n}ns7wzS{nBiehuPs=?RNwCeX4?F=a$w7|e=Nd`{ zJkK(Y;#Nq9$6m^^$-78()v&P+(4D>Hs!40|(DxCpFIRnNv)?VM-Mlsuy4w2@PjN4Yg8UBvId^I?NTa_OI@JF;`%UvHYSqHOA2Iw^ek))_;x2Kt-E?0WO#XOZ>Ok8Z@L zM}2r>(Z=0uVCinR-$F*Z7c?G^Y;{tY&6+%O+)*GsXnh2miBVrpM4YLw3DaGix&Pgi zp$Th3$0Qs+48M>k z;~MIkmzoLcDK0Fdyo|3`*0|~2iY&>wxdf6H5LXKpHB6*dQl~dg%MfkROis=PaElLS za!D5~cN6FR-O@KDI={4swkxZnsMfA8G#0+EA0A^{1^1dJ5mNTHI&CU!$GrEscSE`vC|4x=+hUa-+xxaY{3 
zN9FD0^5Q*%`Rh;QaFjX8y`ZF+WR9G#1_IFsi%z{tqh_36razPWdCxxqTX+C*xRiO3e5C=57!mFMR>B=9JtH!6W#c?Z5mQS2uS*VRApWK_gDMZ z58P)o3Yzl9n$!DvC;w^zfG?Vkgw8RuX*H$wSKIUF_qugkK&2VG5|QF_&4<6d!rv7M z1W}2Mt4BvGN`Ar$|Mtl#%ZtkV&SNLr-wkLg z_f`K4kpK2mL*C$(+&>#?F#GMQre(pZ(lZile&t;J*&4}0yp7HwxS#FX_w#Xo`&1PR zRwXFRGV}houjI~MBI7rU;-?9}-AYS_TjKq1Ty(27|NfT$+mrFe#A9#Y`TuUCla3!S z|5pp(|8Dv}j>i9=H(h+XsQk|ZV@dHRvl77Q*S>-gXkZF#qg7_WjM_g|0?sgUnimz+ z%axV~j3-3)ZU#Xb&x^6bCw4Udw7&2QgZQKw+mTY}_6+#!E4K*1f)`-&O6|LB@j&`mShsaU${Gz`cG+CU^+g5OIC7x@&afqOy1t`mG$8Vv!oTd$2H)C%t$vaJ}%Up z{%ckr1-Kt4RHd4Lp;3U!j%ZFI`55r0VvQm=I*D>4ufJbdttO>QEnWS3uZKdx(;9-T zGyw>D6XC-u;;n-Bz6Nh{V5v`%hVFvf<$Z&n#Xz5mtN7^N z!CDlR_}OUr>dniQn}0pT9bC`d8Av{`IV#wI613yJ84?&wkcm{@X=O z!9ROb!Sf%5b{EM-0ISa@PoS}2Qjm^5y;~IjH>32Y;q>yN$}jT;szZ^3god2mn_&?7AUK?2D4wH)LzgexH z-@~aC*st}+u6w_irl`gSt|+w2l;)Rb{>Mgb=!0)J=zcO^_gkxMLj|ldL1d5AuP^q` zwGY*SElmHh)8P2eo%$bBI4uTN#X=q%{>vNrkGINlgj?m)DcAMiuByr(tV&Oap7dw> z^5--E^M~j<+$!%@o(}$YRhDRARn>15c+`Gtl?U;OM(S!FZ|wd_UjQSMY{7h=f(#d? zxmkcK;{nifwhAEk2e8ec878u^Tl<6e2=GNj0O>L}oJV(nDFE}n0AP6l)fR=72*Fna z2v@a#&S3?dS0U5xng#voIzamXlWK18h9j}s<^Sd8MVXPLet=1Jy7u?Wh7%DCJSq(kOAvJdfza;RW0}!TZ8+;It;F;`z_3n8DU{N#< ziJ=`!KmiDT5-@+%F@zP!(Kam_2J?iiO4fgKWDpgby$cb)2S1w2fP=IR=#z>@z`*DP zs&z%ftQF-+P&Ec%uEuAk?2sKvh2o!0%>M~+yp^y&GMKxHn02+wiA`X38p zG=aMfXPyDMM)7X1hCt6A)!+-+^Bq7=7lK^s?lW){ZPG zh5ONPp=$2D=Z*WT%%Iru*raTS7+LyY;~nX+Q4=guA#Mb-Dl`me1mJkFY^`EzmcPO} z)oh?vUxxDr4FPowz>1ryypMyJlr}3`AcBzDsz8bt`9-tSd9g~m2P&341AoxQ|H(sY zkU7`LZ2B1<3&mK95>mE>Xha9ZG)70Px$2muQnWsK0~7C$TfmC`mJId~PFob5 zqM!JY3Fe~2Yw13qmU#mo7YOtn^qKepvOW!HpJ*gKj-iFiIp_k9yDw6qjIT&b_YUb{qN`6e$(zyUro}}4yUA)wGz@RlDz*~QTd1460i0K4ahCIj* zx-jt-1Y08Aqu1ka!a#b|3FnNVVTM2AV_Ndgu!A$zq)r@# zlY*rg>mewd>=kwN4j0tLp9FLo0vn}@MG7Wia-bSewU~-v0_~esV5&%L2EV?aXLS}d z`wGbCvJu1#BQw$DiJL3n{RB7;AjKj`grv!{_V8Ri&kblrPFFj>bpi}+;AeB6o(Ew3 z+X66K7%qGSO6x#eOG)s%h79|IHvq-I64gW$&YLtU%gzf*eJQB)(xc)KFfKAz*lQ11 zjrPWaZyi)F_Q1bKz)jqa{crB{MgVXtyhF%C56~$x0Mi_X3?Ka7E(Nr2Xb%=Zx9tji 
zlf+!Q-qAab;HJNaFutJC5dyLmGE*ZNZc^Qkf&}HXY!!H>Oz|i$d%0RPSSsJ(v}tB~ zfPr`pghlAV<*^n?`?{6A{I@lSYozw!0weC8Z zFUZ6}9glZH__$S!;;S2j!F{+uksDTGKJLrL!0$r256PBkFh6|AIN=>PQmO;2%tMLz zs<8l zlkVbnmOqh^rse?<@R9JNc3kED?QBGv+!$G=&FbVmi+(}>WY$B#LJ7L6ioL2zPEfbl zX#&&O`mP98ELTV;Th&;Vcaru@DK8DLVT8!O3jkdTm)8C9%qr=7LkAc?xsN#y|`t%O4XEUD}(h7&e0hs}*vjIM3KkXB%cQ`H>L@i%A0 zQVzr|4@$jtk{+tO)th)BVSYNzeeyawe`~`cN<=6>uZk=x$Tm1fTs;Aps#qmo* zO0kbii>6GCROC*Aov$Es9gCQWsOxd|JTRF#e+r_6dz1(L1vENfO8o|MUyNcuzUa!K z_(Ku=Ijda%S3kqezdm8yt^KI%tAhA>@?mAan6-3gms_+yc_QS?rhCu(g%1~;4y)ZV zh!2_(1k0acQ1l?l7Ucv|9uPmpx7T~7$y*(~j`{d(@lx!(J|yfZeQkfSSunXz9bU6L zpnDYjVSteP2Ql#>i&P^4mvugZ)7LySrUHrcXS0+cYYa@Z(A-eQ?Gi^ph_vcmBoZ&x z#|v4_gmB3ijha(vdu9lke?FK?AjFweuQBkf=QgF2#Uy$*DYR~xB^Si zjoUAV@nIu9G6Twps|dsGxEoBw+pSE-V(ldw_w5in)~Q0p(JjxA>Lncq>;i-<7CSrh z$oF}Qwm^IE3y3y}{N~@%sROMTpxc$2*cF8?U|vPW{X&Dx=3|RrB%pcOQ;E>AJ-+%v z@q%WEjK`V??L#7Dv#L*7zx?9?sK}klkvuE>7WuD*4WWW{eZPv^RFTz{=99thY<}TE zRM20+e#wqS|D2$$b2EdOS5~@-@Y&n!D7W}ZNwMO~I597xz?A8v=K;h_y`z$k_EdFL zE271#GFkoncay`@k`kY5aN{a=l8-zS@PplZVV!)73-j32SJN2<<1nH;e4-QO0)eE3 zIyPJ?DEa-1>J|{ec0DLaYrTN3VW-_Ke<#gCd$TdjdIVt>OYXhOG(RIB;$CjIda3IpAY(@urcLvSdqExY<40e9v)he z)lm;*k!#I8piTNMlvkF}$aH)qL?*hc(SiO2;EKN?;6RA0^4Cmd}8Jxp-m7aM<Yf5^hZJqCT7i* zzE*WQ3-l6w2jPXFmS&D1ODN&E5Xbk`DGpb2a^pt8m4wjtZV!heA5y+;Jo>t}{zcR6 z8>JUqF;0f%@Pz&AAIedU4Z~sy#$P8aBCmdDZiMmdUc<-{PS!V%#PxvzJ|wsVGHmp^ zy{A!9>A>-wXMlqfQ-}BTme89Kb+$eFo~ke!sj#42CobClW!-munHdqM*zRkyssKGrV}A6K+uZ97+HL3ZHdL*XgeSvT@MQu*S7Omk_0(Y2_{ zM44z2xF=BpRJ8F!odnDnD^^uTShF*T07H;sdDew_53BL3A0(fw?wBUF=SLfH%8Mpe z+Tc)f?h3Y=y*C>iz0?)jHYZiskkyx$>c7HypC1-4w?X_Fh%mtC#H)(XS+OjZ2lBF% zY{i}ovAf*8+2cwUF+^Zg`du(%Dbbvf6wZh-%SkJNBund66l}#}Wi0KHbT8)CFnnU> z;WymdBe=0Gi(tyuCSYwQVh-aT@;03h0NFZOEg1?00Yqp)yO?NnnSC5{1vU>ic0dL| ztiUNd9&)}j51~Mb?r>;@Gv4B0N8sJ3Rf}!$<pTsNw4H@#3WZp!UQX- zL2_y5eVf2&jtb{1;G8q`5(;E?UZ;@G%0GX8Aou@Wcm5SvLiS#6@R#s znX%-9V6A}^Ln#-kE}T~dC)^hCQb5!|qF^!hb%kbTjXOxwtJ^1f6%Q&xhe>bL0FVP} 
zkY^ddoNx@j{(pD-x3u5p1k+P@)A01_NjcyxsWxVQcR!h8DB)7l78Dal#w2<-!lQN4TgY}|3NK%&+1KTRRo`{F9uH#?j}NyvYP@aQkm7Rq*O1w z>G**u_9XTK|CP{phHtbw4>d+&BSD<0JNqD<3KB*&6SiguKr-D5o4&HHuwv}}jxM); zZbAM~3Wo+X)>>{$T6x5IpIolGj_{0T$JmbCuP8!xynCOW zm?o160&`*N}zV+zZ}{Pb%*&-=ou=)MZxB%sww|CQ|ixBi7!h;7FQ zxvk_%MlHs4l>TAjm0AhDmJZc@I=tK(lh>sZwojnxVI7Vet;vz}rOxq_hBCXxo{bzp z#R8q;vCxA0B>i1ll~5~+O@hy0CORHynW7Q{Xswr!Xn*7zlT`n1VyXN1IQ6f9_=3NS z3@zF%irbTiptF(}bF|!lC%bFYbckcmVj5MKzhxD>0njMniUT_`nGmtfV{bvONf_}reAQoT+fW0coqP@tqiBsB zvLRtY$l#3PNmo@oI2*4TV>O-lLdVND(UbNpQtM&x+RxFDIB_0XU{LX-dZAw~x>XZ2 zJiGV%}8B*1t|2`YYQMZ|F$0-X+1<5cW}Ixuqf)O_qkT8GS!A12>vYxf3P%c zlrHUKbm$CTN8I16PwuVkSDhEDhI9cMG`mX@lua!=>>tgq|H%wkZ%c)8OMQO$!rPmz zS&YLeg=Sx_I53t~5s0u^y-p12#I0!nt@JR$eDr&hAm5k6Y6*d%(-xReCQ@GIu|GGH zT5q%_sVe8n}G2OZBH`u#d-V^%Zz z!=}vER4R`!JYza@2#3jQ6l`YExUuh7=%|owRVdxvAL@BjlVpg3ZR9JsG(fX3TwG1af1_D&V{e-D*K%W48OVHo=uTeXt7Q3t zijLsM^Z$pvw+yRlUE78g5ebn-5F{tvB`pX@Hz+9$0umBRmxwe-cSwqXltD{KH-d_ixZ}Fdy1bqr+fzA+cl=t~-~6mHQam{$ z{y1-2%fry2WyE1QUJ(9KA4>WGbGDUK^XVTMD9M0F@VOLbWsm<6WyRi1g6-U|-Rss# z3<`Nf|NGI$o)-M~zixQIHD>O;#mPfW1mrn4RqLU2{NkXZN_6AVe=wAPBjRd+9Up$&9t9Sp4ZQ`@Uw^>ahBFn&64Rts`WlWYy~21pOZqx9egb0EA&*JOTS{<2g5e z;30kD>Z9LVZ;>ohyT1a4GEnDhNQi+Y%`vL6F-?Y;OpMdhv4uDoEJ=ZWpA~{g-R# zUzfNk15<*@GxP5~w*IQ2O8bibf>hgQYpv8-k(-0nI;H=~Ec^E)6+v*hT&lN%u-^Evy+5C9L%a(g zs`5a%h0HwS>fYOLg)!z^--BM>IyS2Vu5p-z2tTEMB?#>GOA-?XYEF4?;RbY~$0h~% zgXrb2@fw~V+t$xZ!FRj1qh8AZD<{`!Pc!GGP{!1d9p|>kwEIiC>xZ7CM#mNp6K+`8 zFBB9m)xtcKzyXQ<#R?NpnjX9%nm>wSSY5c{;cuxyUqUd=cG9w9fjj}&gK{aUhJ-ihgejQR{V?mQRAaZw~Q5~JolkSR!l zyf-8!gOXw%a`?nx0BZB^f_$SA)UzO!9cL*>HY@!=N&%yq75nWk7i`!EXTWcOFL>xI>H9ygUkdQe}937pZjogRNfEqi5-WqyO!w!0V zwq0O9cdS|}c>bbHkvQZxf}SMlnuRX+UFlg)h=kMaW7+;U3eYXIh5YtPg2+pmzt5-a zx5z%AG~l_X=f62s0N4T?pE4nW@MC;Cgd+n-r3X3ru)bX7&8YD~h(oA_=4qu5dwDQ8 zps9OigyBQ-3Z3UKoi@8OE%7Kz zr$&k6o!%ZF6}(}L$vO$br{nql40N@Lx(WwN;vFEBKf6FGXDNP`W4pWSUU)G7#czfGl2fqphoLn{HR@)V6M$JdjI?D 
zPX(FA@hQH0OGw^l*rOYi&)h-}BTJWGcO3XIy(gN444U7?hb?z}r$dMIiwaA)`VFDC!?G94i?_klIh0YV&JNH_V61PISH>-oS{P zI5a?lChgYdZ;V>5T-+DUgkJ;%7Y%e={647eRo|fe)n`JI?L}{6g8qO#A?^pzv-7Q= zjJzUw#=R{%f}j$9mU(6sk6{U|3!`&YS#aESbM1wW(4?A2-5nb|A8zFIFV+wd8$mhw zul!NtEre?UhLxdnkOf(m z_q`;)-tC9?3RmE+tme1M!nTo+!2is^w{XwkZ$o|27NsCM7-4MbD`Wb$(VtFJf*isNHnLo>%Ij>7wB8|;TtaC zA7mRtYh9u6sqbLnPZ3Gs7Tl^n*5s@&8d-}%R`b13KSw|a0U4|wB=yiq0pXb|f}u7dzTw-r>xAo%NqYKdZ{T~r9^;@N03jiBc=myOKr?p+rJS0A znI=9RLf^5fX|w}9vgN6FjrxfAB(z87Ek&mu5?NC;SWn%cb>P3%DHGM9q z$UI$Mjo_f=;T2}w*Eb?~jwSbi{P#tDYuc9QnXUs0HhAq9I_Z+6m0PkdP+v{=z3oIt zu}L5b&t*en;^Af2og7gLJqsgV`$4SutHv6c*7NBuJqp2?hQ6Q#Kvn>oU$kZ&0FAD} zKvJD96hk&Hf07!F(&&vgg%U|2oMbZkMxv3uKKtYix z`r}<-W6eIquzLoEyCJg4Af5bq4jq}Cu@B&1=|)fWo`W4wN=m`@t3j7Gq0Nf0usJFj zO3`z;^I6pH&!Lr&$h0ap5d;YuAEkQzsQj$TIL0+O{gH6%r^*8H#Eo^ZvmC6hwBCgr zYtvKJJ}Ggret$1v|591@Rh%lNA7`E6ufmOWZEp28&ri5XH!nY>J!w}3Gy3TF_$oWcmGpGV!nTL}FFHtzPR-@xN3neL%lG+K(OT>GwhTxy&3ajGb} zCMj(+D8H#CRJvcv-9#g-O=X5ghQ+v0_5yq3Oo8H}tH+R=#GJ@d{e57N$`v&$9y_>3 z$f-6t;><0*Qe?$;tPHn)l1FVoUjiH$SIK*{CH*w}H{{|Bq3pYDosXrIW$zOml?J;; zbNc{#rX19M4(sz$V7pwSQ=LP4+69us*|~+ws-m4*nxxn|eyA~&eT1GU!f5T8#L7G0 zRL)L0H1!v^_`yyc&pLZY_4We7NkTDThWE;PH0!7xcb$t;rh?!%VMGBwu*;uN&0RRv z$7`BZw4>WXM)hx-Fqr$|PiZJoz2~v3nQ*2?(V$baf9~o9`{%8;U(dJ{>l<}VcL5}3 zMq0e)=EA8V$aXr7EJsV0=PCBg2ohz21xpC)%ub@7=WYUN{_h3G-o2Gkb%MTzp zH;;v7MgwW39b;0^==$8yGU3Juqeb-TnI-k9UjDp{4DpqAdB0wH?xM(%7#q{U*K|yl zw?{V+s@}diYwqeNrx08b7I5oNul1BOm8tHT%rlH1)9jOB!a{mxie=0eDj8O2ye`z(AYQCN}F13;pI-ftl2WqfTBx*!x}Bceq{?4^0G zi~l8xDHgq7ZUx!}!CaRv&mDmTV?|?CUr(ncTC`LsOSw4L;07YWx=Zvpx`nE?OEZw9 zrt4CsMC!*hME9U8INt^POxWPVOlZD=a&QW5mhWgcIr5I}Lv&QVYPmHOFNsZPJf@sq z5G8niEK|KONi*Ss=GIZt9V^sDoN|P((t@6Jo@2pF<0;2g*}|DHs#cA66Bv%GAd|Zx zbji`idosm0K$vIOv*7eh`~r@A`K~WwkPaH0q2FcX6Kj?eJXZ2x)cH5HpF89$3%Z4~ z$xTDA8v&&bl?N(NVbHnVFj->?C5mEXpYD>AqLwY=>q?)C&?+a>?sRT{7`;Tlq%g|7 zub|!hF@2VipibT211p#8%jR$d&uD3VFvVsKy>i4vM%Pk`Y<%2}d_#euUR3BDj#C;z zIpHHh*8KkFc=Ra{QCJZUAXJs=tbRiXR`6qGxpuWsx6kB3%9^V=Eca(` zs3}dBG@xVt!Yp&h6+r 
z23~m5r6ZYV^h&b(0N72X?t>r4uz9i)Wc+XSkCd|(!NNKgjb0MNY2}2q7f)i<8zJped(F23a``|#ARF%z$gWGAWCXo= z=$j%lTpx6{29{?p>iA!wTMisHvP$wW)_UJunc4(X zk=%oJYqx?{OIF=Ixs4`mJe$us%&U+7euR|#jc5~A%fBrq78>ajxWHTxC<-(s4?=eV zDSpTSvAM$ZPV$uysde$7hy0h#{C7c)OnHF6v3|>8aEON^YrwL-j`@AnK-VaFU~9Xn zg0p#zSjKnKOZoSaN4ioAhRIt^*_l{)kKbC$UZoS1f7G2XVDg~)>hf*aYX>8VlK zV~+I)bVM)oOsR26UU|G+SDi*#y|uv2aSd_ES^#p1PV&=T>xS2!lJOc`et)}`4M()F9x1D}AR)QwG7n%|@ zskZp&$i(H%vaiM8j4~@_zgaeYNAoFk+U|jxr2X~C{5LVc{S!O2S3@0#iM5n9bYsIi zvfzlcJScw5_|qo~gVKJp#BTYBgeE!HcoxoM6xF&{e}hS!IYK_4i%dE__dG>&6!Lu$ zDZnCE3(NUFB&xu?@#>?nR`>51Z0r}G{}{c=Ir%ZuGEt9FN?G%kueJAbYsYktKp}fx zG=+KZ27WpB8OHHVA)eLORZysNA6aSPmfzJv<5+A}ByLljtO#RR(#sYz?n`&ndND)X z8UN{y1PhY@zB(b}59l`MEv~oC!qp{tFR^O+hO%gizd*~gg%8i(+7L>_Pf?RC(xuiH zZ=PasRw)$xf>$MWL+T!%*zs?Q2Im`68n4i+1ST2eQjf_rR9728@t5&5%Bhbf!3!FX zkqc*T$DpRu0j;R&PxZEvoUTVIJVNE+Qqyc#!DaUJ*zG8G%H>@-E3CX3RHvZajrK@A zt|f+xq8qpa?V!32R4pKIf_6E-`hO+FYN-m(!HFTGH@1C`)xwB?jKBnrt40Vj@htIr zfZ+(!MTh$oy$Gudy$y>z=-|X-8=7~A%HEr4+@@#&N~VQ)(6h(jKCsza9(W!@;y&<9 zWivG$Ackk(@@6~2GbVBa zm*PrDrWm3jv5BxuJU=B1Z`v+w4`t*6K4^Bn*`3}#kt zcpCc?%())GX`v~{)>(4Tg@B8&8sC(g^~#0(NF;`V?oH#91hcFB~2CH6lW}u-K54aQ-Nv zj5fVI)!DQNmZ*GRvX9Xc4GeJU7?IGQ+~0x9-Yc9%z~nJ zM|Z>>5DRRtf!2>YrSng`nKaDz-T@>3Q?amg(KN&%Vaz9k|Us8TST} zQUI_2w5C2ryz8CZ`HQtz$#Yce8=<2N8%C2XgGwZJbf)#vCO@MO@KOs=o<3iZwo&Pm zQdM-~GdXhtf^9rn@p?j4h@O?MJgzOF@)~`sHvlA|Za6`^Z7X3UO4>=C)lBH_w)^~q zGO+@!dNEjp{Cw1N0KMRQtF~7||DvG;a3L78WpB`8s^Ka!0mX zY@-X*LIixJiQy{1^g+wXdr!2Oc+B)^GpaeW2))dl4W#amHZceh6QGRY$UgR%Gk+7q z(!=$_8cEdE4;l)lfrHy4=6H4E@K|q(e3ZXw@U;234$lX%LCTff{DOU26HDscdJ{(V< zcBvi>ym~hswmX)Qmp}gPvI3r{RM$t8e9GESNIwYMJ=dwikUt8c2_3)32SZK?k3wSh zgnYa9JQz@ziRVo}Y_L+&y%7a1HMZz(oB5pm)B-U44Cr}rjJJ|K_*jfN4QrVq*_b|1 z2|J^vXpRWqy9Il~2|%Q=W4+SbElOxQWc{!rxPQ^6qlZ~iE5$F)X)}WEwrUsu`E+-F z5)45)k>J72 z^-SU7&u z1}ZP*>0t_7dhyJNhm*(lBlFE41*ZfCYr}Dd*v58z3pR4C>LMNnYLBqpBP~{)dl(CI z-DtE|cO2Bz6@4U8uWusqTITmpz0WU=w%|3TR2qvt|01g(@?)xZj!a@gu2vD}?Un4S 
zh+w)R_=1n*oW`oZZC>OFQ{Z=WPg*~lYOgm9fj9h3qSpHNH^*_EL+ch*w(RXbQey+2g&p;#t55Gk}`SU4uMZ&gW6N60`H*x^s(xmm>F=`M3u4J z_g@K2=o(w8yk2-qum?}$ukwa_9e=I5i3xVZRqiy)=U`OSLAQ;vHi83LGIhDm@>{|a z@2sAE7HzB9?sQp_x=A)>7fjt3>e`Nw9D2SoDEiDCm0j@PS9Kr(Tr>S~e!EWaZAXc! z`3D6e9-azY^$+7(jCh>Cty6o#FwQ@a=3L>!q{?}FjGN^?b#^6G_!M=Rie`4w**XKk zr{B1|x=R;o$B3W*L;tkYi+5GK=DWrIL4$_Sw)%*_Tgr~>c-1KSx4;WX#EP&oev+u?MGMR#egO2ri0VlmTLCSH*k_A4&~ms-KRUnk*lYXB zF&L0|5pgxvslQf*KXm1{UJ=#9|NsJ52WHMM-``o`T~KlH*_L zUl;Lz{Ad_CwUs)@&W`{70s)LrkfpsZJ65OtH7b!2|7+gp7*YD?*(Mcn$D(!S{CTB+ z{@K6oW}jyOEyufAgzy7n8C3c@xJKcz62Cw9f|Kq59HU`cNJkCAqZ@8kurnG?J|xzRuBPu)}N zFDEvw_xJoW3*gwZU5t`slj;H_>H*~f*d-lm0-#sKIQ+{OB6#k0AZTY|Q1V01Xt8z7 zwPs)-cA>Cv2ZrfRO}D+yI`~b6p{_$rC^DUsaOFl=JXz2-YgnS%^`eC`P>aICJ8%qB zVW)U|(=qo|)(*I_enX3uz~_EI&YWRt&bzU6+6N%?G`lja?r`x67cdpRY8^889XJPK zDeGMPxQoAGTAnp6YiFboQY(K;753}ccxL8)d{&>p?EcIlQ?dz~S+gtM!G|gh>1|co zqbhJxwHv_pu@ZPe#5#hfWzk67-ig#_jjjPbMd;Pr)N!ufsFuMgC-BA2n&tH(Tpm-3 zMJ(s)(^pyUHMOPvmT~&(G?%oZmS)tmuFw|~zEaFGDt^oA_-c4-_?7`Q=l=V}oVk`w zvir5~i8Rs&n72%x6Z72B*4uh<*h=yH3T;M8OgD#>J+OTig%m$^0`*$=OA^r8Z=KGYpDg{byOFrED)s(AEP@9Yqk5ioVJQx6V_t`N z;~!*$KnK6X(7{g5{$;#*|)h+jNY{cqo~*RqYK^*AAETxXTxitjec%=eaF! zkLje6$3541Sgw6zHFROyz&!-DJ{~nrUZ0i8bx*3*`_<6=Zm!+FN)JWWPY(+rZYAmN zB-#WuVgLtJeMP>A5l@)TcUYt`@dB|?_}YKn%#fkoZ6>4lo5C0GZlIe#+$)Owbm}Rv zp#1V#Ptb|pgQeds!@cnfpm0HMlZinV6#sy-rfENQ?1!?ywWQ+xpBM>vqu4LA`X zO#pZ0yJVPNBcP8R0)W6WbUj{y!Xg7{{~PA6OAoMvl`ynK3SkeuMG8bwqGh3^FLvcD zEKvGU0RyuUz}D&-C4WJ1?L6o$y*?OCSWI`BkD&rhwkWI=Oi?qy}UAvG!=(2{kze*lz~N1()97%I3qht{0* zbHF|Eh8~=GdZ5iZ@+KLLLxCa=VY5-|0_D%bnChA?h~_nic^21hje_YUGP1^0tWE%W zIsz@x<#y9is|l=vNt<+Y3?9MIILbJ2sp^1HZtmA5#8;T~X#g<{!xAdt^n)&(z42wf zqI;iM{Ce|+u|ot&yCb=_e1A0~x}XJvYXlmtJ+4zJjl<3SDobI{50q`$<%mdcYQ4!& zt?aKlZLlh8yGMC*xrbx(2)>o3K=^|>TpI2#k(`!! 
za-|V-KqN7zXFKi`)xD1zbC}Vbw}7YG1Jy^;MNYeV=#&^FzBB_z$6sI^X114rqzq_M zuJ?qj(xAg|5kvl2Y*ijXc;D}J5O0L)a`a3~y}(lSYZ__7up#({Ndit>mo#XEBv5H; z^loAcaCujdjwCr|ty9vyQ~r+1rNw@sqwv8uFG?1Wuvg&Wb!Y z(%9Mgfr7n)zd(p)_ofBvp~@H7#%xi^D=b=t8FO-ps;}Iqi�Ezqx{kao~}w;=Y1T z1N78M^XA)ft-PkyiN$z(O<(GwV8ZfJds@0}b~ofjId`{A0xynmleHK`lZDb^iwFfX zhaQ(Zb_i2N#w=429s-x(*QsFY`Lk?H$?>za>m;hHSB_L4_t?+=mRHD}2hG@fRL$H% z+x9e`-;Q*YTB}iAXb^gwqWfB#i{rbgN3J$UCrW?-Qk;rwYF((tkRC8i*^b&2`hcb( zM~-BQ@qkUMP}r;iaiWkEd}Cl6;_gZ@jHr9eVwij8;)WxySS8~c?7wFQ#1#pzL3 zpIlQEi_fdYyK<5Ws}ckF1Gsyq<^^P~C;}xzMHteTDMq>r?aEYdq^k~WLqX-j`HK>y z9p|XrO_ndUw*n7-g@d@K(lvgjpiQ!}8|hmWfBP&-U)8a7zbeQcgN3N9xyQ91P}Z#Q zf{G{%d?y6e9_t4gNN+|#P=Zhpg~b}fsIpu6(~wD03|MGrz+PeY`ao z9M98;Z0x-;86iA}RnrBmnLF$w;NCa@2aJPBb ziVDxMF|1ADyaq(mB!Nnic-}f`)q6_c8>L&=SV5f+%hID?a~?JxeKL_N3t=@FuEBQ8 zzuQ8+x#%1Zpvq~iz58;9cl|P!H|H8?6?lAzgkv!Juu@gC#{_sp+-O2#$&+ zoxHESAf`g7dGhAtspl+Qxnd3HPGeI5u8iOSR{1^t@fMU0Pps}EzH)w{@3#kJ3Dra% zugADP^7UXzLa8a#_FD7N((|3t^iL!b;B6GQlyzOs!(CFv(V3!uE9;-UrG?c`IHkWQ z&ma?!-XphuX`N=YSKj+7VPcGgW~NC9K6k>aU8Y}ywEUOpdM!T+JDKju=WKogaEH-OE5(HeGbW;{$_%KLmyZ^hbQ?fXNvz7mXOsS2iqwd$i z@BAXXh>vXFMT)hnNgupU7bN!YP7GDFN}M|O>9QK{oZ{iEMEuMa4T-ZE~|uPwbz!F7rCUWpr#qzDirT{ z_cwu(JuEyx0P}8|sKjBd$6R}D7!w(Pz97<;sAKjQk{c6a;v7Qlxyx|3kE}B5et`sq z$9;`Vufh#GZ;LZSZRg_reB;-l9`?#Kk?m`j4fN2eWl|L!apjn{D^B;4+opu{6Rs!NH$9HtLNhZ1TP= zYnQ*B2a9yK+)#~v)f-81{8*-LKc&>n66r@h(|H|l5*Y34ySxQP6=RpDuKE#f7fSC- zV@{^l*o!ez8P~PipD0YM^Cc-Zn^FF34=r zX=oZhv_$mU^K{V)J=)u5hhmomk9(e5~{`|CWb<>Wd~ymT&6 zA?1BPsyOz6#E%fM;y&m5gl4>!sKUXyxV4uWYIWC&QysG0W9poDpH!%*RmH#VPl_~P z=6`KiSHi@x*rmG6RLe?fxd7O-^qISy>`w;U%FW$H;~q}qdocXsQ_r~Sq%4wxr^#uU z1aH9-l)F2_M)?dXvnitiNnb@Noq0OO`C&qq1A}ijl2;dqeb>e9wD?w~m#yTl_S}P8 zJU^s(k?sxKX1s;OSJV;@uGABSC3@nTjUod&aq``ZXwK5MGo(AbWn`+|<(Iq{aJUqn zg>e>lg?F5ZEPU_F4)RYjC|$9nd&6RCaf0u3J?Axv9hJdtkaoe5u zzuWJ;-}&*$*iSrXWSxV5$0uSz9v3NSJAXB268|VUt6_)2Hvl zpk*vFP9ohj`&OpRitk|}KQCv_rPFIb8Iz@QzCX^2H^8P+ zx6Y0gs+{old3;}Mj z^yht4^OUHDO6atzh3neIu5;tnjKxFabg3Jzee}Z?OuKEY&tFLSYrdO|7mTUpefY*Z 
zp`x(JE??cRR%GMAwN8ne$lUj-XLZb-evd1j?Q40uUBUOX+d_~!kJAnCuuokXcYeHZ zP@a)na%shz+l@>3aVIl5xL*K=`2O%Won+^q+GNF<_3dP6WKB~z-SX5p#*b4iN7~=J z6zE$16gr?=y04hn{q=`?!TGI)jIQ4_f;&B*YQ{iTMvBF_%17kDJDhegTA1{mr@kVy zL}|<_eR)#sb;2dH#+;vd6G87J8Vzk{n0r$VvhUUgPt2{9df*(_)xdNzBDN01FOR8? zyc@Y0kPzNk`1tBiX=WmZPRpt>B$}TmIUYZle6vkvz}l=VdYwN;`}!-+oE+y%QdRM2 zLS=-W#rafz@eA~KqT_RUIOQC9Q&ncZz2^3e>=%yj=-ei|$S}+jjaIBh^sbvlDU6{qHDC{-qu~Uuu?L&-pf1{ zm16qo29HE;<viYvnxA2tqf*?qh_9ZkL=1L+z+D~2pKjaVVK3NmWO!j z?~A_*uy=^t1GOT?A*$IT@ZHvJiQjrpP|>TQOqt9po4Lm^4(tlquSdnx^Tg4Fzq&y^`EgP?_AhmpII=RI9xo5U< zI9R+2jY^xz-+nGj89Mv)nR;xpc_>4o2~#lU9n2MZ1Ka!i%1j7z zzB$v$_T-Z(`%|f-tEz=PWRX4lfk8EoqHV4!hVlFqd&N4*=w#lWQLz94hgMyVzVW8@ zFB0Rnke19r51G029Mdc~hO>mhxvxcCt4$Ub&RNB*ry@KT|I5nvg%02ETygY14<0mn z2+|P`Dq-ik=~&Hu_nlGch_U8rtg12MVQFz zmdSN+SGZPZViwxMaLx7b{|0$lPw+(fLcEr0`}RjWzQRK7q%Za05x};@v>41>cgu@$ zt5c_-rvF{gC?BlFpq3FNRq%iq`yN&#^J0R+Dnx$s5ZM2cbz?X z&faZi`$7qiMX3I){<(a4PeZPHfxNb9V8>zDZNhER2a5|TmHRDk2R3-jVxSAR$aJh{ z2TRhEJ0oxuRb`d?Q3+yh**?WsX@xsT{Jr zE(}xb7;MWZi9>hJ2K-x(8hEY6t8NhX%TB&ehj>=;(&o`h9rHZ4? z=_VZFf!1pw1B(P$N=f5XgEKR|031dY2DU&uaYu&3~TS1!D34Pwg(H+m!#J_)4urVR82T0%Q;v?~A^)OI^8%>1DhdjQZBI%i(J z{Mf@t+4KFJuBq7BJpltUQH98UdP$33FpDVksu(d@NzgpB6!Mr@5n`~X8Eg|Z77BNb zWlA&BrZcarIX|$V;swE1i?b9#p_NPTI!^5_UK*R*CKv(|o?{%dwy@~2-@UKGnj|TV zug)cC8C-HzGp&2Mq@<`|bzT=xZpKt=Ix5dpIXeC6lcZ9GuF2O&$+%m2;XS|0w2&WVg>Jk5*zW@av)X8E;f4mg= zu_i>FO`?qioRY=*bbQl}lmZzB)XX{TcAHv%4B1ip$MvMzM;} z4Z2Xw>5jV44JpEkMr^jRyG^*>=nvT=vTA&pbJWZN1(V zw8~4HQ=8`YU2lBYY2Z+4xw4cXn9|y?`0L~Mq^Cf(YWQ_WI!|^*UKtI!k$`E$q86JR z99o2th^JU9H`rP*vD-NJQ}6y~%cX&|XRJi|2d=?2fQosL&^O!7{%+fTz;!V|G=ck! 
zjj~kg^6UT{mu`5Id*Ev%`(04Dc9(T2>lm^PR;XyS1Bvt+Zd^0nywjQ9D9ezHAY3|O z&)^TLBH`Zw6yzzrwNj`v^Pf%u*joOJVZY|cSS~79kB;E29 zC?7a_8%Ol0iY{3y7}q-sDsZYusC0bN>R>BeSb`)H@|h0$q$h+e?YwKoMp&YQ`+XSUhBx#W3dJ#ZeuM;nD^*!=wn&@mniXGDc#u1+BLVwRYmT zh{vxJwDGOM-Cv(u3{bkWfju~vZDOSAX;=_V91@&{Y!5mFaM!#Pa- z@9U!;I#>g6srtlN8PN=Fk-5D;M|#4?BQzUW46n@Eyb{e`{dcOC{({GErY>4w+Kd-1xvTp-Sse8H;)jz95eECPPJFek|oYC zy0rXPkZ8mrX7b<=g5A*QAqv;AZyufPl3H#brAD-UmRh1?`PQX(rP4pT;|eGdlB_j7 z*}NV9B#l*KtJGx(Eg{o|Dwk%*$ZJ9ks|#2+HBBTEN5 zJ#eGaQ|H9Ogb>kStpiu?26HXJ3FHDfPh%89O!imr;)#S4)ht4J1E^H6vq`ZfBd=0UC<<4=vi34_1BF(lJHtM&84HU zy^4zG_jDY1F3&YUk|#*8RC~aJjfsh=$VG_%nC|k~yfD`oe|{D^l`4OHPo<|%7~@e# zOexz3!mGm7UT)hzRGhc3{>DlYb}6{Z=hD;T=Ep~W8cSXhjqtHQI*R5rRt4_q>s@bb z6xrzh_T#@^5wL~I4*8t<-S#E&>c4*cpMUmUK_}+y9#H_udzrrmb z(~DSL!D}qroDQeaf)uMu@!c29$^Ww~|GAR&xnK&Z{SdadSApy0WO~oK?$+v)2#7Ig zOJDcwZ;9==KUy|00(&*mgFTz=?`)Z zC~ueL+tTFZx+g#AJA9q{)r{ru2_L_2R-&;h}YH!;^QGRp$w8gJR3ls07Sw@HBmi<#u7MfXEqnYu3wix;^XHpZKt>nT%o?C z!aoO!0ulG0CROJ)86k}lno3DjvNf(uWrCm#*BrH!tkCfS*CO5XbK2|K^TngIwQMK8 z+hPj(sA17kN~Henm;E;(Vf+yyGspJ@Lr`?o?|nEZjYE!?ZV(*82mrguZ>=2& zS=;FKJ9OEOEd^I&__aU`B!D1@^&)4{M>xk-g5jHPh{Qp_6RQH|1fzX)NXct>ottca z&ORv33<-4=ZbO-F({uM#Wq~&cmXR7)<`ggFP;`Kf`U^9D6Ef@mUDy0Igaa7ip9o?fr_JrE)0iE-+M96m_(o(0oR zG6a2#XjLOJKIB7LX!Z|4*zrB$ryHP#G>ruxMOf$r`{U0xR0$tc@Nq|;ykUN|t=F1_a5 z6;G}=G8TSKPm#zRABfpSu}N$hq(guS^hG!yfggO~a%yAAbNb*aeD<7&fq4j}1w;4L zIh=f9;^1GSPH(^N0A@4^DmCKd6(Ir4@e-QP*@>~l1ThzI3^dCMEfh;yUJ_^bn0q8b zmIw$N(gwA2(qCk>GO8_Pgn7QU%KPHk~ho|J4jPCqY>GEXu95l)&mJ&IX@%zQyW+DNhW zLNx18)4=Cn$4gc!AM9o>dIWR^f$UFDS_t$g8benMQ;G3Mp9zmlawyY+Czc~AW zJQRs(p1sa4RbYUsgPi^k96ht2f&UnIIqFw*@0tl>N-z2a($bi3$%yiJNLhj45a@ML z&yq$P(9Uwa2Axt;ZpCvv zLR9WNtPHAQS1hp2pBSzY``3Gi$Zw(o6Q!5`eh5J(>tZST;hn%?bArvTumz>On4@81 zKcAa1t%uPeEWm_vKkp=1dHi-KO(`9}X^}AK!b|SFruXh}Hz1Jims98rpgGbO>}<(n zQUapuZS+WHsD)){Rq&NdPOA`6pxbf?Q9H99;t;NjmGk-@n!ooy4b4+z zs0c0;kzs1up%iV{Sd(P}hG1cRAVBBv@T*wNn7G+k;o-;Zhc#)1kE?^vlcR<0X?4y_ z7qGy_%ahk)*wCECn%iUqJq?0>&MA#vAo*3&bfMFL_H2{xFQ7_3gA}9H`NjQT*9b6J 
z#_y#w3J;U<3ELh;n#iC;LMzy)zcHLxWv!!NOu39zxv_7?!zkf=bp~x1HiWl7-raPkbyi?Cfw>)w~>KFu1q^)65h`?q}y^b^xBy*+31@~GQPSUF#EM3 za1DuqaXBihKKw{SB-XivS}HmGh|TiRK@nvmGawbeA6rK72=|6d?}M~)EP%s{AKb1s z8VE*Eb_<95c9N;e)Lr^VpY-MY3w}H`(ne#@Ga-RTuzf|^ri5oMzKq8XREZaTmLfM5f)XoYzf$1v zcStoAhob`jWzzyU=cUM=OT4zl_57$IhbXiwdVnbk+BuZf z2R7^3?_Qw3`_yMj?@1w4^8R{Ib84(HntCERPQ`_TLT01G)q7RfxzUJ%LS$ZYNGeTNANLoS{ds zFL*R!G2sg}=FwXtyD;MJ7AuXh5NxlQoP^MT@Ei2iYAvi*n^%8uLh1wNgVK`A;Yyn_ zD5b4Q<)v(@0%i~b6EA%C20b9f8ZZ?Vj@%&q)cP|V#{%%b;UMtPc5 z<;e>@W$arYT5~UBm57lcv}YEm;K+ft*6}Rivy3dR7GseGq@fSb#1MQ=Rr+{?o$Yg2 z{Pn#hMA@cvhjfcb{%xD!B*UoQFnoVlTh*XR};7=#CDX&j2M`!b7JeWm7 zdxkd?D-5Jwrw-h}RQAi3>jtrhj9Q`H4V+i?CvV%o4*`b~H)IKn+h!11oBVSQxm{6?X~fsH1&)+x>RH2lh#{fGD;O0G4f z@`|yHfZ0E)-*V+`$b%GD-tD(Yu=;Y3n{-C00 zSLdE_a8}QKpjKta`C5zxefr&sg)L?hcl1YV`Y0wduFlyrM*MSObc}3!NcW_WHitaQ z8(Hj}ESgV#w9ScMYi@2N?9_9$zoPS@Al3@a1Xt_kB2U@kz(cu&`DCx0IC=FU+7oNa z<5N&$dbtWp+BDL|;@J|f6DhsXQ%_I4w_y0wwEcdui<2AZerY|?rDr=hn42Uh+xT_$ zS}dJu0v{Q*cdS{gwT6yNg$gidBIT);`z+N%+Vf-YykQ&Y>*OAa;MeO+U!iO&w2`we zU;>|5UeY4&wtup{HCeHOsTU_zXZ-; zco801MWkibzXz522T#j8%GBizd)sPbGkS+4TxCrb(DgWReDGuk04sB3Q#Y7)!1Qi# z$fG&>NACLvTkm=y>fph5l;(W)wLoWnNTsm2*4)K$dgHOcSS7g?cFWQb!PRzg!?NY{ z@5FHtAtV%ftwmSU#hMFJ(ow|)vM?LaEH$)*RP+Zp-_x=6d z<;wu(;6w9i1&+x+z8mPz-!s~3izH6y9U&KC^y zf11E^@IVZrDkxAph++?HJP{9^d6nzP(!XC6{9?!Fjri#?oAkeqDw)w+;L3U2eh-rN zBFfBrKgi6T_f)Dr42S84GHwHNOLb?2r5D`lDKB^{!>7IwDzR&*zQm%w@HUiTSw>A% z!}%tXhmygTEU#_X5V{q}K`RgdCN)l0&3VWvVU zY6j zIP~28olmFIl)g~58F7D#lyUv*9PnS0A)p8U=T1mZ-?w5XoWjYwmwq8OXdMiX7_AR8 zA$J{|teuH)Zrz6X9>XIMV7*<;mAB$&m7laFUFI;a6%Nj1^5Oo}rfaUs)2A6D6dI&q zk-3A2M?3Gv*x{Zt5O?0he{gv|ecuvm7o_!pm6lXNueX+jQfp@8BV8P&8Z7Rp4l6E{ zKC3j>b6U{qbo&l7pYn2)lS8}*fE%u%?;hG-`xRxdBK7{!Gbay7(aY2FiNzZ(sNea( z)$+YkWJ}E#U0tUCAEviI`Z8^Hh#p@4eH=dM81o@#MO*N~_=H(FNAbI9Y*HtBfih;Y zYpk!fpsagrJiOGRe;4{i)&=2F7_A2S#Djq^q8T38vsr3l|^*MDN=uxHMiCJgXoB4>>o9^-llPCPo zN5APQsj|z6yBeTo4DZ8QhwQD8##Il3Hw}WA=1W0!erEXf(;PSiQa(ztD^C4Ew}ZF=2sDFFEbUN8T8H>|wo^0?F$VBh 
ziGhcAWQX@vZzos1HyoDduTC98XjzxYk9T%$rI38Vc#r+VwEUXkR#+bMc_y<~hGjB2 zt4XkDO$C}m@vfvas<#}){AlZWAJ~GpTDhpy=XNGb>s@1{=WkP? zY|zSj&_rd8Joj+@2JPj|X-3V?@01E03dM+qoWX|fwN$AZ#Wst3&V};Sx<>?@)$wIK zi?DTN(jgmhI_?emhf2-g#_ZluN>U%kRggoidZ2$|vcT7h4UVp9r0UM~zNf3u+zhUz zr;j2MG~ctA(u^?p5!Zs7<=q?qp#P7(w~DGW+qQ*)K(GXd;O_1g+?^mv@Ss71ySw`$ zxH|!Y#&CCsV1eLHa1D0ns$F&3KD+At|9!j<=dD^xu~>XE<`{i+sU$;;eO60vH`$LV z$e{eW8yZb~nph`kR$$$Kx^2d1>U-I^WRy+tMN1tTZqP%6wyXP!{kS)pgTQk2?ahcc z(OCFTJu5R>>hOknJu9_T!dTGvdw%@3`2o}a6|}{+CfbD#EZ{*?j*)8mdj|`?v=wOT zDuzq44jr-3%6$NL{lQ9|c776QJmR8pbzg4sJ9~AXcJA=T1U`{*Xu6C6$Vqrx80SrJk+?FK8I)gJpGmn0#4H{M3_LbZFm(qR z4nfNc%wr|4T7feGG-#|L7%Q}Nma%8io;zV)0+l70lGxZ*VHaGZws9#dwM6g)ZUN9Q zZg3r@8}w{YZ$7-+Ni$Z0%e|R6g!76<^Nc_VLo5Pr3yfFg8y76WXX<~Ok-<1PMLzai zg|G~>Pxqaw|1d_vPyX%D`!>9Ie3cQP9=3cmfHzGt*$oc3Lm-EvP}ZylQ((({1P9Ad z{f2x^v=0z{fwU2Fv%;5M)1EkjyCcT-_r?w3sYGh&iZLua5ppI0359}bIubaQlC{Lx zqbIk4?`(vr=Zc)Kj#j3Uq_(Bo%IK1f{$8T7;>F$pNbb|M`|E%D-q7lp+6J$uA`(E< zw*U9R_Fpq@z&tpmtWS?W{?qFQeKgqvpitcMV|K59dT^oRaxQOxy!Fxr^b-PjsoH?} z4VamN07@Z%;U0n~5%{agehY+O4yKIdO$Tu!zzy@0BMe#xG}}bZ3gm(SRDcF(EYd*v z$TkE(7-N|6`%bqI)uez&d)Lm=c#BNRXibDd?nmH!fF`hvg0mV&#JdNb@^>dR=n+VK zGexSwXzIr1IsBeChW7Cw(7cIl*{yO{Z=PgbnJC|yy8ABxb#Er>7cjCzJv^~RCpHzX z0R6oQkdrd?pbv*5-kY$9`VIQ*(s|FoCf4URmq_tH37u1+F6?Iho$fG}!3>}NetEa7l&I-f)3N@-8-Gap*Oiqt;Wn_>t!C_Vs)40{w9 zFIfTuST8U-kbw3w_v1THkFH49rno|(tfSpUSi1Y92o+}W=mDZnUb&c#xH zSM?TfntTBYWqEe!z4d&UN08(S*vm=@rGxozN8sFCm?OA?SBOD(tKZoY!Nb3>1n+h* zuHCctVE6bQ2smSi1+WAxfk7slXz(5bMb<800+bY=jRGBOK{ow6fyn1e)91NN!DYN3 z8_!&{=_RpKj;EJ7?wb#N?7T&K$}z3;@njPI+(mJKrjFNEubdVZS(CiYCYiwjiz6+V zN+8d5V+G1-D>Pi~zrC*f@{cOlb`vz?iSm)#nZR18-fvaoqBlJ$EDNvUItoz5FgO)7vJVlSoaf`g82e zC55ZxBm6-3y7P~s;SWEH2<3L-#}fV9#R%AoFt-iK10VUou|f z0(6tj02ftF_}}Mymq2Ucna`-6fg9>@49U4I9A^BUxGPsogRlJG5OD!;`!hprLEVtW zX!{v3%rrNWIB#r`76ll~7odu+j27RQW>(;uL3%N%WP+7nL?*HW;+me3V1oC=Qaut0 zmnt#9bccaYrM6a0HsWRfKF|D&>GGtP-{vq8bG)DySu?mDR!kveL602oofL}ysvhBoS4fi1f-ruV zlGmuZNDx9E=_Xo~>~lYG{-ZX$GFFMj@dB%$f#))TG6Zb~9NH+7(>!qNHQT<<`kXW< 
zbK~e`hNLiVR;x+m+q3#BK>OU*+Ig=b7Zzw_gWIiwOYubeG=*`}1M3U0;!MCBEhu@6 zLKZ1rhIMq*l@KY5y$nM_RkYBo(ZZmSFP4$rW7Yz(39F1ZkIcXelmDbLv2w&5FVBKc z!@T~UPu3UJ-^Ne8-;!Y)7iN>9{F{bd80kByHHj)60!oCQXDc3O?P)8WRtL97Wd89I z!fVa0f(iQV;sIsM@3L-JLeUSlP5N8&rj<-8AqA=4lMuS|trP6$xD~5QR=+aff|3M# z9&x$6wJl2iARKeSj6CzG+LwQwQvbH;8=FLC7d8Ns zcw%_}o22%!L<6vqqZNQzb+!x+0^v7-jYA$tIi(x~YZ!^^o750Eg}1bss>5`bbd4az z*%-)l28A7PB24H}emAKVTtSe|H z=zF1CkcQO_wueazors8YHJzJK^0F7!TafQ`1!icnY^cr;8uoD;C%8+`uLm?RWtq5c zEK=8Ibt6KTqH#Cz^FZ<6Jj6LzXo@W3!e2qRg7%H~MP8|Rs#Nr@qKL&9kDwRs8Si&ThSeC0|p;QRDG^&AF;2<4hk3f)_=N|T= zLeW?PD~`87$%L9?g|^wI9iK__mlpsyO$C}uG01yWqQ(N@4Go|={AXQ+x8E{!Vuoer z9P8sXedQmTfHAiSu<8ZaHP;c?rW*e7I}12#0I1tM))qN|T4$)BEpDmc#kX2|7FYDwZ2;+=sIAsFud;^mKVp2)AgL3tB z3u}-h52A1+-2+hMY9h7$fZe~P);`+mj%@H8mwtwuecM#+3c95ZbQYJ33oEu)p}3s3 zIJv73_JiF&n~gJjC=S^9-2Z!u7&;Bc4`buXP&26)hUw`GrO6a}_fR_t7gd;q3ss27yhi&Z40Rq#3(*FKgOk4kbvgxsdCtc_OeLj|?0 zKa1Kj^;K9sq-af$1Q8G*Fi9BAv?L!BJLr(31>x2K<2uSQ^iD3@<|!XTNn=u41uDh_ z%Lf6AvE0Zn0}M{n?16?!ba2(kZFat=L}vp>?d@hA5?7j7U$X#R)(ipCMxU+ZkQOC_ z!oZVn-zH~7s))atNpF_<4r_%;@P%%vzIHpceMh!qS1QhTbk)LLN5g>}SAksVSK4Op zBME3!P2J|pD-0c;Y$UBWNOlnR7UIXFKGBaW`^X!46<&qAX+5xJk)m$%pkYLg=0e?R4@J zS`BLot-69|NlsXGKk4?@`Up}t*}g&dgt%4>*d z#9G7d7=y83lS(oSch1G!hr!2@t-z&a^A%bIrO4_Ol*nsZJFh57ekmJ3(G;+T?2#WX zecfk2RFx=L-G!yd{!JNyiJO$ySoEqLy%dSdTPj&o4y!AlWsWsr-wuR1QxB;ncYVROM^rs5dg@5^scedgu*uak;=b@CIu9AYT*zdtJXL{^>Fe=$ zu8!Axq9IMIob(}7wls(frTjAKQ>Zbxs*Eb5h1bbo-^>l?sPqAm!SG%_ppy}$oe+C4 zX$0W-^6tBG3)DOLuTK=)0CpnbA3rm`tnPl#iAC^+e_Q{&UV?DV`LSz z@x^Cyd$X zQt6{zQ|ggq6-56endqH$sN}O|GEF76k%xlQ2y~Z$k=25CXWHg)HLn%kEa8} zk;45;d65v6mKk6M1JoqduWs2++km4x`|#@6s~2=Nf)pqaKJIYpeu}$7ZDQcTXNQpf$Vb3TN)kH2mi;*WYG^UGjTF03ByDj<*va+DQi1NS2$vrHsT8mY z9(tWb_b$ay{&%mqNf8c)(Iwd7X+DOTfZgWR3~LOI5Xm%8T9`_w$XSpxtT0lILu2eP zAi&228vnr%Es8}6sNrZoWt&Pvd8fHvMKX;z<*B`IU_aE@LZSN7CvqT+&K18?RRw?|l8uTV5jg5CtT5 zF1aU84$O*dwXR^Mwz##yB6hLXM{CQxnc%qQAoqE?VS98f%2`CZjpIpOm4zwJr1yL7>27ORT}& z43i(16qvUth&ho;WzK7&Cac&|H%fg&L$;2qkQk^ 
zaSzTA=7R)AJooA2#!U=-uT1zY$~VO{=^;k#`T>b}N5*zbdfEm)`8KUKG%(G3%#0W4 zTGrp9rC}5ecDy<3<@V~z5r&m)S8AUIczW%LmR!JKxHNJ>{gvdCPO&5rPicvSUg<|D z2wc|%>C%cuBb_KqLf&&v?U{ER{d7d7jtW^56caNnkG&m)QKZuYCgIDpPn{VX#s^RC z#Zt#q2UqrXL^rg~uM9P{ENo2&rV=a$wr9sNJibueZP(yCf-iMKGq=4`gYi5vn1syH z?;~7^?s+}j8Y^;plGBX7+i;`s0hI4V6B3>J*kmey{wP}ANRs4p*mWd`ihY?&kr)~n zot@3K-p8BdL0~a`gnYBmN9krLlsX{e9O}Jm!T@7y+Ae|Kzn0vnEq$|O_4Jwi_cWE@ zQIegVF!b27wxPq-k0m^(^vZFoGN7^*XSvOAg%iHpL%tI13vlk<<5U%0IHndS+Y%tV z=dbQvH*Z?+WH7j9S!l|TXkSkZz%H&riVbOtxwtrq`qT;gvTt1RUwE3>D0ME)+ff@g zYk(C*^AsuJq&Bd77%JrOMw#X(zH)ZtoAX{-UMGg<52Vo$afY4x!YM^PSj*j}$r7R@ zC_(QaSClH#(?ks1WrYOuq6G2i8KLqD zP3rJ=)Hw^k8H`rPaB7lhaWP1A!78B_Lj5^dgJG++q`*Ff0{?uvIk|HgyD?O)&>|N} zezgL_6!Ph)Y#YhHLaHYj^1;X~w1(&_d+@pj$17<+_%Z-9qj?}m!|XLh-oXyzJW}O+ z+MIY+hSVIs_6kK=wFFK&-OeP8DDbt8kCIfO{42cI%tj|&ZIC;@ex?r*qB31WUr-wh zHY27c{Z5X~8wZ8MPBfImIh0N$k|ztfg6Eh!Ka81_QB{UENJ-&{Rg_aAOrUEEdGJo5 z^p7Qm_Z6EFBq74leq2BxB#{?2qO~GY?!}j!LCRMGuQZ|D|!{-?G~tbY3GZJ~d`j@m*RovVTW=EC`te3>7fa4l$#NcwjTN*)|Z6Qq-R6No1*NpOaD zSxLhk>F!u;+wlA*@d8^NIRP3;L}Dr@DVFDSO1C} zBt&x_O?~ka6}&8_nDyvrZDn{*9%lKOLyWSu@vN<+*p?zIV56>$1Yq)!8yFQ}X-pq= zolGp_(ER!wLFCmRDJ}6{CFbP_rS}VDj!%j|m=j4TRVEx#TR7jCHQUb_V}8zKmKEgh zi?WS}N4wxGr#Hrmo*#FL510-2H;__Qe40RqYa5wzF+fedkZ77#MH3hQo!B%?PQi87 zF+V7*L5)S|{`@S5aNK$!(m@kV1H*n0HaE%C&9=zt$DmOnx+K4>0^gbjlAa$^@c5e+ zGKp**f#X}-cYM=d&$x$TcChgM>FyF}PRnlbGmB$M;*bP)L1&^*H7uryBpFE2{IVSB z^Lp9omIG`^0z>PZA<972b4#xnA+fB3S4A2ejlXrd^4;Fz$U4>UMvxSJ=pA1nx5}?v zOB1?(gXrh1S7zZG!;zi}S_EUXpyE(sfA%hKQcH$w6YSS&vgJOk zPaQN29y9{+?&7prL#nd0Dts$Rq8kx5>Q^hOI;BovUMP3(z4$U<@_w;qy2>z%kRnaq zwJ+e+4*;kOm`2q3OMIk6aHipnSnFJ{{+fDeOcp7uPhs1wxH%D? 
z=gifCozOkw+*DXel^*uXzWZxGrL)`n^oC%r1T?k8e)fSM33)vRMc69fq}`<&%qF}5V=vhv9?pb@zO+zqT+Sb``gLu z84rBVMX>H!@_?&UpkvcL_*nO^0{PM1cnhKY*GDG|p809)BbWFYx7#LGkHp|7%GR3* zrN4TFT5Y2AV<-N28$Rh_`TV)t_>*d9Q4R>fn&ke+B$Tv#vnQO?mELuV#VxZOG*sde zJqKyuqca3EsS?OJwQDO)vd9^&II$u+Fkv`i71xoID?MC!yPa{V)K zR?*Scrlp3#a)OU>_x<<%<$O7T6C^|%sTJg=9IDcru5 zYbfQiH>sf`v=y&-@)Rxco0axQmAgtYgXkn`WSV^*h1ms``7;2vo5MKjJToElpwS*Q zh9gdAYve2b7)#glEj4}9L#^*D90mzJ{`S>ZWJy)jwT+`uk`Pl%wpemh1c*qdr<28) z{`CS_{AR9Y%2sXKz7+gEQPs+Gijz*-9dC1u{hUqfottn_X~gGj|Ei#-7Lw_YG)ErJ zRI(>T)MYMLNW)RDeG}K+AdTf-UIMaz-7LVhuSW-Pv2Y4Z@jE4()UaHY{1X^Bp2A#I zLuY~?IJLrhC_%ha+@IJF#PSSbEd@xPjL~CJqM0!*y&IvUwJVe_b|5HApt`1v?aF@R zP!2)cj4z`sjQbEst}Yk<%{Vaz>D40}oF`eB0cCm!gf)^Y3^mH%iRng$x*SH65OHq+ zD@);oeiy5;Fog9n!TLxoXct0YHF?&?_+1Ev4}cdzBxAgZxC9C*PmW{f9evk=|PATP5x(RMQuh;3ha)mhw;~j6|<)hh=#f&%EfO z*A?_-QTS0>F#b6BDTM!n7^YO}JjGqFSOdK!GN?x79~%ZKINuCN#t0JM>;6n_ui9 z?qSKs!2$r!E2A~9>w@b{LVKGndbgtrAsR`dR?SHsV1p1T7)=>;pdZ#Rb=VPDgHl4D|RM+$Sa zRI{;EcsvN=54TPhk1XqP^ATwk`{n2aUhs}{sIiV=pnlmU&YrRcu&%Ph&b8&&@$GL8 ze2WbIO4}zCNwTia(SYP;u93aM zFT@M90}D>F1(j*N?`&}(5|gU?LYJnu;pn5m9YG|ual)teT%}5JaR}?44 zG_Xn{I(`!Cq1!#)F}IP@_kM9X)etcT71zGS%>#v-ren z;>+*U66~};NQ$S8{0k3?45uj3Xk=@RxCVp-w??aDOiqlht#}|ZaMdm*`&6zV)S-9$ zI|HlW>rB(ck~j8r3>eWffT;`n7GE-t1<$q_tXL7QaV>WM+g2&UWRwW9F5x;JSc55! 
z;$P|vOc|-&n4@C9+JwkUnKA>g-P!TTGfbbgqMY74yo5p7w8CGnWEwAoASbpX(Bais z$%26SCIHR`6$2ml!fmZ{0YCZ8F9QdmIuyyjP9JrG1!d-*G7?^&hIcf zT_f6?T3}4b7!g#6x5`jPK}Gv3v{(o%qEKWSqjT3KtICZ+2GBBgW}bbp2dlNMNC6Qr z`WYNZ7DvyWWuRE0#T9FoouvqycFNT)=xZz>Eu}eACInI);s-S6aYzS(2EQ0I_$(&yh5JReh%)Rg?KzOqC74v<9H(o{9J2r zrJH5}a4i&ne+LD_w9pCuIH}&TX34P&U}@cF1I+u@U%VZxT{1#->W;Xr zjd3)vtme0|8((0uVwO#kR%s$C6KKZ}9(N*t4%qliFI)9%(n1%aBLvBt|6Uc;@%5vA zO2=Yy+m;7dJ=PVvb+LR|NX%YulJQ_(QxQ0+M=l?gAh6Qo6|}OVe!s+sZPJOI?G?Q8 zES8(~ok_mj} zhgHvABy=b0d+I9eueUE0D0Y(TB?Rv(=H2bMv*;xcV90mqRwA|@Q*w$xVgJBz`QOUX z#>PBnT_e_*w%Gx|Yx^F!<%FU?9XQ1EUbUSY-zK*zt?aiIYC`)Evq$^k)dO%a!y@xU zJACG9u@WZ6>d{yIm=Ya121;yfb z(v#@*#0RL@s8q)Pk)ZJx@#N&!I8w8MTpiV}Ux{h)>Zf0@rjl5Bv@We z+2?jwfTf;gnT-N3ya>Dk?}?;8JzaQik3r@v>9m}HZOu9{172PFkv!Vf?K+*Pymm@J zF#0#BhQp%ELNx^EYltNs2FCjnU2douf0ky(LXOD-6zYrI`X%LBk7*>48PhOngeH78 zrp!A>jTOz_j~KxS_WXU=m=&KNeZyAcQSLsC7n2n4x#0D{@@H_szN1sETaNv(V#qkM zc*GE_l;S){PuCn<6K@hn@Z1PZ=txZPC4B7Em1Ums7vw9<+VNMp>wxu$sQ&gFWXzs; zEGdp0?lp0Cf|oR!mXxu2ddFDEfY~NqS`3 zprL}pai!W0?_(m{H!9ibY^GOC$f!ly)KO~aj4-GDdgLGRik3$u^hFHK5yh&h7&jo2 z10H~xY5XOG&X_<^9Gm;INMW1B@Y7E?1ZsvS#*Hw}kg=dyNq=2EXtTl(SO2;fQmVDJnPZy<#$N-C4G4UbprV$&)=ckAh0E0eq2??=AJu z(|D)0&!1Je%AtsHR3jIKjk%sR&#y53q(LC%5i&1@t9)ZJR^%J2-tQDaN;J2UGm=vV zku0Am(;^YduuwL3d5cq$=zJeBT*oI2vorK$)O-1LxsBxg`4<-jUXC>Ks-Vza>E)45 zRW@?ch$r+5{Z}>P#z~!SE_Q5NrGK66y-@vejj~<$A0;@K5q9$ng>h|;q3xIskIJ5H zwW$aTUylw~^qNje6T^Rq|F(SrQ}5^9{1Db3LPxP1OflZ>*?3O}V=r}fj%OcytdwuW zt7ZPxIF918(*2@OCBe7 zl?rptfB(tFbf8LDY`A7_0(DmeUgUm#GKbhxVrpwmRTT9L+TZx#V(4_hOW_Zyl0Q5x z9vmW>TaT{fQq%--_08v%>A#CwwpXBs-k(39qhxrtJq{GV!{L~304sJY$46upXgdgn zt(6nb8 z4FfsbaT>RaQ*VDl{tq7>nR@ZZvBN^;|CAO5nt6DbV&La>FTJVjQSiMa!=d44vuBMH zqMyfdF^kX(?$#ZiX?|=(KM>9^=0)a>6EbVvJN#PD_4w=5->*%)KV`AcpGS0czP!>lkFk84;nelI`KPB_h4);EYL!T{mon!8TUrfhN4IPIWaAFb0kc@&d*_o{ z;GB8(PJGS*e#$?5eJp?QD;EFy73iDs6n5U$_t$i3ju)4r_#AJCwncnARVNzx{BF(; zgpnWe%#3{mJfGZ{#*%V{l5m0O01Lc4S>(?TD-8?biX{JV+KRPJ_Wybe{RbVH<;hVI 
zzthS*uF>W(b1IqG+qY*k1ztW}4VN<}o_<^xZ`fGZ$Wq)5T(93t8hCTmE_p+qmgG1x zVj+|L<422OPTKyVJ}`p^AGW);lA|JMIKb9IdU{@J@Fs3oSf!V^T;G@H!{uv}hu9mw zgA?G7(7L`FMR^EhcE`DoE{O#9eXx*K|MAn?Hr8DqK_74yD)!wBwM=P|lfCh!Xu?Kh zT3sXGlR3Iazc2P?THgRK8qc_$?l&SPv)E2k+YnX$m5i#^*rvFv?`$&XI}hDkRC3fp zE`Rh||GgQ3ft6Nj3;<0`Kr$wNCu^!n5496;rHL99{574?LponKH~r(V{zla^4wtic zgQxiN&Bw-rw33|Y=owwNUxy#7C#qHF7@hX%ZvX#2UGDc9pGi-(?`_eu`z#zK_14V+ zMMM>W^SICRtP!b)Pl-H_5zJ*j0`~N9JL27KOdLlWG1D7HMX#2>#&CLNxeVkcP{~MrL?m%u=7T67uRJq(mUr_f`~Tms zM6MF(#D=8)P()=>9z6|9jNAD%M!ED8cxqDSjTgw~xZWB;V{i2Rh;F;6&_f?CuPbUc z_sHT1y$r6mkN%;eK)qSMX5O|kJD+R80YSw&G@=`%r_sPctpot{!{3dgf#Waqd%FON zPRzc0>UB;z1bA2905$YkN7n`jTjMb8i$`h#<_k}W`PmO#Cyt@9Twn#Kwk%=gb90de zf+{wF7!xYLA>FP57OSD;=Ump|-U zp?tfQ$@(>5bNd_QlZ>0T0+|HV@&veKLcVFc{r#Zg0lKO0ecMZQNHz^S#Y@*CZYP2!77^c#P5>0) z)lq>(#@z*w51T}J+Wt<}`U4>ILnNt#bnneny(xV0j=&3kY}~H)=N&ECP1Rvx^Uqu7 zA#ScA_ldHS>LF3ONxwG#HzW?TjZ~h>6D4k~{hmcfQbYeWdjI>)CI`FZdOA5n<^U0& z-^2j46^Mc&?nle*SKi>(%aXWg`tAXhQl?QTfnyHxo&m5Bb|b!=18n8jT1Fv`MnEj7 z$GgpZvp4Aom%4Cqz*GFN-uzhUr(!V|1xdM=lBn2&~^*dz3Jw27L6T806<~xfSgYBV%CIe5nF}d2&_?{ zK~JK9|9WIfaSfbma8s0@{{#X{@Uo=D&}#1u7iU84wSZWId#7;tNk5>Ev+_~h0k;@{ zdj0MwPQy3JpV*`%q@klWTkaH)p4F+f-qO@i<^g70o9*nfBpk-@OaleQ{bKK1LnhQQ zRfq|BZScz+tfzsI3*fbTrjyQ?CyGEWvznF)o7L#NE=hwdBJyZ%Oj@2B5A9l4cQF&EMM;}L(jgDNlwfFU+;9{nu?}9^bGkB#!jw)1_pmyj0MwWT@ zfr@ZbzQ)F*yz;XIu9>v=RUPF$)cyuA^dY0*v-~SOKyaX}zx|8eH+Q?Q0Td|mn~EN) zf%*EL7}#SxLqm~21MxYid?!DE2TafByl?YSqP5KOl^`dD!C3X`$CpD>fY{qlg~t`+ z^Rwie9y*|GYTXQqG>hCe%*6TD-vdH(e0Z~{;}A9EVt1xw#zJanyty~}Amr&*>8L2B zV8j{xt7)~rP0BAsVSoATnw#}q(D@7YzfIwZ9d1{tgP`^UqO+}v02@52(-#QZKvi;XLA9m7Sv52(o0b0HyuG^ivK!a zvxi0M2LQ7(Xnw5(t%0Zb0Vw|_2aXp+-ArI3$NC%2ftrKolI`m#uq^_Gt9*IxV+wF? 
zOLhAx5ZZjzk<$4ywZX57;dcy#fzEtpuqJjKa`D@JC^b$d?)t;%!!g~zUPp*r<4KLd zzt`12u!(?8B(OUYu`y z1K&^tNO>7#Ykz_$XTh{*@*6~EGWtJ(QPTpNGa$_^f?%pXQ~|6XLrb@td<@i#TOhd* zEEGG*ZkG{lvY^RU7<$} zM1h=d7HG?f6we%8iV~iz*HsBWta{gtiaBE;7yf^=tkQ6DDvHcCQsRGk?4=uzjp#!w z5xC1wCVu+EK7#5wRHoYLu})RW`Wmh%sM7WRh) zP;DXu&XfLoHwZe}Gc9l^34h+Hbbkq#NfaAny22Z{2144tfE6I_Em4CM{ZMD!AvIaD zqezUtqi>b{aYc?Bn$^BgDw`Yp|OIf*FBteqpxE+)#TFq)_r3z@abF z2rWgzGGFL8o;8U(hsVTz>INLPHd9TKfxgwa#iRASza%G)_=#|Orc-?+j*rI0-Oy&L~5!X0YShl(QmQFVN|(VTu2O=AZ&uZpTFmRMMqeWVlK2(peO^O=XF{<`|BcJMMvmFa&p?3qv1CUFAfT(vSaw%~XH zu3%MMQ3E#@zr4o85gkPM%|lEOBBDW*HKbN#zq*q@H?xw?I;pTz7YJ9Nk%6a%`%V2D zP-&4CuLv|*V0Q$XbT}?tTJMn*x{$f0@$1ahM#4PRC^S@k?;rq@G`DDNbp3ZNI5si> zO(O~^n0tchUKq&T{{jso;Mc4!906NM(SeyX>5a_oVc3mXiP}|n0Q2gDVkt04v~vV@ z4;!}!{DVY$edwaM)xMaJseJeTb~T|eQcnx&@U0G1wrC~!X=a1Cw%NNZzx(S<<0NmW z$oK2ZlAw(V%oC6(5A2$FR{saT2VB?`$d|~%9e@jmgsdZZLwPCJUVRi{>n?jk`6s|= zChbupAF#gz^0+I|ov7;4r~r#I5HU1Oc7GJshhWA|5{E7Fi39{IPJ`-}Kn`lwyOFJL zE1^WQx{y28Q!OBt=5lEdc)tU%nL20;D1v+T(DutPT1AZFPa?{<1z?%#zZwDfPTcF!EN{wL6JW!`X zUba8xdXMm##nVF=XG^D3HAs#ngc9NjoT2lPtEaYQC2)gTlu>LtTVs^^-T75u2< zChrA{Q;dA%@drlp#P~OXk{Zk*gk0*Hi#REfpkrqZasCRvtS|>Z%gx^3-Q&RF2dqzM z{nNYAc_Es%5T(q%^TT*69k6`#{oPP@J90nByE?fjVGG%FrS>yJ@C(92R(R(GIm zG_C}VeFImbtLX1A7pUB0^x%Dwi6CfMM5?B3u-D;x8&uKCI^_hszo2PFj;cfGP~G}M zNjlNEJ%}z=bV~uop1|7=lcvVluy$GRBy>74Wl|hQnr|J(Ik~)m9iRzp=v;s!1ytAH zp|(^9fRj4}Tm_&>a;SbIs;ah00|l{yR=^*Ttu$wcb*EscHq-}@45GgFw<~zBFGDBY z*TJYpO@BXHK%pkV+Rl2?CHTY~2H?4$;Ab-uo%%(u*Rd0&X}Yu=!4O20V@!sa3r#f_ zg2fP4R!QR_^2z6Gpr6;E@K5<$v4x_f$lxGQ7UPQi zAJ#L}?DwFH@xHC$rTbgA_UDoKfAbOa#<8N)o78{6Z~yQ$xWx@8Xjk#2p{U;_&HrFY z!3wpcK+{{TF9I)5CGj$N7MlMjONw1!5v+%{ge#&JY8Q6{JSi#_oP!15R19ZZPyH`JbB4#GYAeO7iJwGYmS6X2>9OUPpd;>NR# zBN+zdcP0toqH6(j!fEpdINyaq{dc7;vwg-FLlxVX0a^f$45j*wEf06l={cPI1U||= zOoyP)J_dby=X2Xog15_A-IBTEi6volHTXWninB&%BS8v_(P|4u2Jl*7&?MP8_fNOz zEJT}3W1au-K`pWM?w1`j)ceh^S3qW+1Vv6qQVIF($q$pxE_QBzWTq@b-Q9j4^KhC8 z8+V1D(d1oNv*K2a2h3q9EUx0h3;`F4t9C@P^_SgWT0nD!Ho{WY5+RrCg((+DC{>#^!IUwH1LWy7CjmY}|u7d;) 
zRGLik6z>31FjUe&&6>yj3ZCc>&^aO_ChA$Cc^iP53!s(O+h1fK!1(eL_>Uq>GI8yY zJW$eVy-;HSUENSEs9!SIy>UToXW&bS#Dpk`wo4Cf58XDrCYiU+vsSOK& zTT-a1JOfTUtM>Wtt@H6VMm}lSCsAmv$j5pB-7pz6sSNQ@4wf%+=z8NjHbCa-g7;~D zt$Aeo9KPV;R3<9Ei4AJ>ihmEqE7IwCMW1vrRjHhRhUDgFNK0GsyJOC{(@ur+CoB-6 zHyhv1)Q_91(KXWJ7M8bycCiWWiUM*-J^V7wF2e1n2i+*&&qQycbuTz%&fJL~*g4$Z zt3Fd_;1+5UYJgzk9jHT>ZMMN&! zqGLkHI<83vtDXbr9e0lpSm*CL#uHa2rCKT@C$=Azyu5bh%=&0OMz1AgWwCmK!yl-8 ziw3%l))?kIP)QZwEC`zRA>sXsv4_PW+LvGo9^w0tM6>h+ycm@oGjCeZn}LXn0VdQ$ z-askkw*LD-ZLDGx8$HwlWWsG5P&R4nKlh?MUJvmq;G5a9 z-U*tp)b>$=Rf3QPdB@}!j^+E8uNb<38I?7VL_Gk1;<Ar(&`*r?oz*-1aN%zE z^88vE4VhLw3Q<4$>(}q`R95C*#02KXb>ykITzurH+p7-U%D1t5Wv^Y2b37_Mlt>`|}H69!!&V0`LZj&m+2NA%@u$OjjcJ8|D-%NTm2^&fmBR*r8O{~qV2w5HKWz5!~&%H8ktHLP)? zHh);tm@}b-R*iuei2%1bvlPNf7v~>Vy5UJ08T>vC4X>5rb@+ZW?|MIrd1h_8m=-zX z+ahhSV!_Qe?UJ9IZ_Letz?*I|*f}GkN!9rA;AobzaG_@WC%|Nu^|pF&cm{yut8prO zLLNJdU!tt4ZT}I$!`J*-wS?n0p|pqc_UA89Gx|EXeXDfqZmH?oV_ORFh-UIt| zqSx|_MDN@H+Cay5Q(RNa`K|Zi4R66Z*ZnDvEI2a5FyutnUue5pinVtoymdYn+K(j$ zm-Mr3Gvu|KsYv$V5Z}jx@6yj7c((@Efo(l^w!mfl{wx~*_atcnZ-K61bOGVQgI5{= zr~-icpJwYXk=dixF10?_QNxEOe+%!Yl>$vC={^2Io+Ia@uEda-@7E5@iBt-XP?{sI z->PEN*tH8zFZZNUXBzsBwMJq5^I79G^d2hekOo0u8ocQy58J6)_)1KC0xl!! 
z17`MjvV9i`f8dhf2%eLwV=oa)%I;b+6o>vAV`Svc%cfHD@_h#LPt8U2L=^pkxU>;X zg)eGKS=O;#*q&Ec=~))NpEz+%eyFRWmu1If#!KwP$g9gctgUCbTLHx+w|2S60hLx% zGo~m1w>%qt5+enT60<$4SnN?f)g8N8@WcSGE{_hE@dny}JW4&1sH}#HdU)|rP)J6T zmglIg9!wgbk0O(Ofzm(a(75u_lOmN3QL}hMR9`-Flx)$krAWqce!bg) zmb$K0^N>P?=RLt5%tD3031jqAE-=IpP|UaO3)fee8jJ=vYa(kC2Ti_`VMv-yJ<`fg-g*n{RB$FdtS@CU5htmXkrRvSZ#+8(zr<&*Q z4P|UMCqy$jReh^oJ`_^aBwqCRM%{DffyplY?R;)Cl=z=!?CaL@o{1?-+GF7(s?1j6 zF$2PRd7qhZs_f-|M;AvPYG;e~(-Q7SFG+E}j}P;9uADm9kSLC(x4PeqaYpGbb&&s^ z<(C!X1IvAal3E?tE=A=ju`6Lu?|n&MU3pPiJZ36HqgABBW>%>xG^>FKgP8@nZ^Jh& zZbOQ6HG*+WQEU)zl3cUp-(8Uz&0A+c>CSckdMYa@5^enRvu0*XZ$Wfb#rP6sRV~$b z?%RF@i;e6E^E=fDxypS*=1DI3r+PwGtGe1N$QYQ&+68nPbAqk6Qh^AvLUau1OF7qb z%=2F`uwFcQ*8yShw$MNd0?z09)Au?m>Lk1t3g-Jr6(aA3>QqSlSI3#QgN3*mMRHH* z5%@|Y6x*)`3;B&oMEyfNoCQ8jIjzVc{&G>0yXVl(@z z_MtZ@{ilf|cWN)8adgx%!eE{CTHod6g4xCmMHq1hjU9A489M5ZFJG!<8k@rbgC$lgZC@we)RkK{#J1m zO5&sWR$mqzQGKYsb2v(AQf_$jR~tTnd6EX^&A}j1m=o*tQ@(|^MTyIB_)hV;HUoO1 zrQliTPq#jT#xI>;Xt*kw+u+TrUx9r>v3L55EUF@7gO&dEl{mlu!`@rQMftVs--M)e z4=Ei|!_eK`p@ejbGzbhJ-Q6wSs7R=wh;(--EnOnQfRrfDnmhL1_x-!~^XC6{zj8hw zhik4`v#xWU>s;sYJ#Lsk@&=_x(DCdEM}6LMGQ~7EDOt~Z4ZhX(uzAe$ig89UpFQ2g z%qw6@TZJ_UlFq`wjh)HpOVoRAQ>P-U{YTVdPl3Rq8tZ8hHV4&R zuv_yxG8r?MnRDh`n}DUJlgXa27CAoc+s92o>&{tvu1%%0xR~N6pU#J#L_ZQy#=ntm z>RkKs?IP}>(D114HZ}sgvq3bd)lOADWgW-32vafvf#a)S&O}0}QeMJjGeVanorFb5 zUnor2SCNc`QLwkIWW)90&-J$)9kjGy(Hk5}@Z7k4sdLn0_zeV~!kBfg}xs6`rO|RRjxfEvS|0;W# zKaipiS3K9>O)5VBmY8GJE_j6)`Xxx1!qjP8l|Zpd&@u?bNx7e>5yovFSR0y^Em28t zpsJ*nfO3AdQHOYF&^>$g5|cIaM^6n`Q1$l7$c11>4|<-XkW3yD-&yMoD0^Lave_Qn zHYi^~>XyIA&gE z6?*&jrYm78X?qo^p6b$SMX`kAbw6oUro(7Lf{s&{mMP^?3EeT=uZ$9hLr;URhMGrK zf@G|%C^}e4P%^OD$gVCNi)xUEdRK65D@;+#GDIg`Fwc+Iat4eNg~H0_bx&Q!-*Jbm zBpHJMl<5=&Y!9k9HR+?{c)s?RR|)m29xj-MOziVtGz!lJBRgEq1kqx#Ylm+H`g782 zg0Jc4lNg^Ak#$C1-kIchFQ)a{cknQvKwUXZdoC| zBQty-)Wj}ac#0*r_zNT1YL?JG^O%F>FxAQp$`^Fd1$Z!@h4SNTUB@r51OB# z(4Qx?Jx${aJ-OZ;fiw@3iEQ`j91Y2_TjK;>{UY_#qt#WtTFjJx@Jk~B$!Fglm-WZ( 
zyf~Ckm`uAoDxd_4_%uNCnQb4o8_U@N^qCK-XzOVQm+yx(OCHtT)Weti;$h1|wEH7Y zZ7x9#m2{`*IZs$f-qmoLt>rA}UxJM=d&PIMjz4K6`#E&R6Lg>?ZqJ&K8YE`R(oZcC zUH zHjzPLp2a~Yy}FO*Sm=qlRX)VTJR3<+jA?EH`v%t=O_arFarx<;QOfI^_S?#J^~$W8oE=o0eAP0U@fHj%lHBFnIFSS3me zc0=yjQ)r65$GfekpZD%tF~RXZ|7n_Mmpt_ zE4ui_qTm^={n?=;tL;Csxc<{$FGoJqzOW(*Re@|c1YlLt-VON%k;a!|#ogb{Y45#q zGtlM7>GY$=uKq!OD|E9Ktjn>#{ySmnwNR@MhfKZbfjDW7Why<UnJg%GkC=*awc(Q}^6#mdPe;m3bt&nI-!}b7gIC!w&mw1)938(e+B#LJbffrslaZu@aQ#K6p8w%trUmp~v&^#lVw=4Zny4K78@lN=i;&f=6eUeqjmcar??4ZTz zxx_WzZ04X^2Q=JmJL@5vZDuf)u^#H+Iym2@lX4!|x~^V4K^!ZpajM+R(^*9;^m7dC z6maDzYQ%Z5ySH#^Qh7`s`Ilpd#Ah)bEqpAeuGXdbi4`h1#Lq1+UwLO?qsnZjF~53o za$iqBvg=U-L2+SBpQV?^fUHqj-d;&bKJ@IzjN@ssrn@F(%pAQrZMcJQ`~mSo2x^+O zvtBUi26WgUj-=Fn%2oMgr{dlE`Jy*v4C;7XgOVTd$AnFtgjuDV8*}K$(e)C$@ET7- z*XV7r6<>o!5lPG0@_LrjX2*%DcPt);KLBSnr6JTVkmZ@)JBi`^yksuBca9WvWKx(Z zlKt{660evrH78oX)!ba3S^654{brcHy(g~dPrtVnKMRkR@8Ep$Rr|jF zsXt|jwKB^rFQ%2CwLp&(r~-k9IEJf?jzMN>K7zX zeg=&0c9Q9Gn{kHUk`cb@#Wox6GChcMG}cmSOPX?qwFLiQi^_QYZ7O{M(n1%)*{fcB z2Z+7QgATE9))=Pmu6C{YGwHy9g)B7$a4GVXLX+vEI94&?V+q8PzrBLccgdflj8Z7h zSaXaj#4<3GQ}wZmVk?BKRP^8HJ&ovQz(@+iu(TB0LMJQ7D%(~YrH#byJZk}Nd6LVD zgC#NRY+Zwxd_(Pf0wf!KS;ROWWgu3#&WU9k={F-Zl&5_g`+Xcq#3afLkK})(kdZKx zD}OxkP>P-h-9T*;yzKn$LSpd9bs^@^Sa#-tt}{}B8LdDj$r)?x{t=7OLECAQwrMBG zoYPe%)Xy6hlELUtV1(0u*Fs3|{nDGu^25l?eyXUP(MWVVToLKgfqUtl_ZGnxr%|gs zlT(FTC-22TO=i;?clQFw73X<)`YG^oH@MTb4E+vd!|kmU^L$^RKN=+Biuq*1kg5fb z>a4hbhgdlV&$(VwH>hnH)hPVo1;;>(s*P-+&?j!W#I&^Wwy#d2iG@_gelC2&S^y;Apr4=Q^uccW^hgvc<)2yEa^Oq3l z;(Ep_>b)Uq72Cpu+@5G>c~RY`x5F=uDBDZ>wCQ44Dt#YEX>ygN_8nBPBEQe=0dj{_aeL!!_0WnHQpFe^_H~I%Bz`WFWDG zJgsUU@gPTk>J?8-gD^4Xpy|PqLy~Ooc^I1`sNaiV3f#p|6w!cH=vldtG`zTqrL76w zli5@tTi|sRBo%8oS|)g!_=-luxF3b*P1*LVQ?j52c-=MzX{?%iFAT~N0q*+sJBxJ*ZZlIcN#`(f9{mE4NC~$tb9K29~>1M*QD)Iw)f81wfRz_H&<{+Wv zOL1yemKL`$#}bROUj4SK+#K7ACb!c}l`Gf9-#;xgmat@ugf~)|RB3$U<1h;nTr#iL z*ApP9oA)h-H@clwajdx)AcUmWiL@NgIi7XC#T$WawUBW@&u$k7e__913OqZV)lc6^ 
z9l+Nxx<}irCR<3i=RTK`<(|9}&16r*$dg@maC@b{{nGZ}?t?H%%D9CsOv#;N06RRkS!+t^SwW2{xCc=%p}fixYS(?QokEj zK>@Y?{9;+Lgd_B%5X>Lf?t9#Y4zi!2lgn!}(9-FQLR+c=Y1-`ikg5BmTVPqph^49A>oYA~00;8GQi_#q<$k{RRE5L=+!Byg?7|g4kR*d9u^n zO2V%bBR$hAI;{KiEVVRKDSE@HiA~A5wd?ymU+4mOEMjUfJ+tkLfOLR?tUsAg-d?)k zNV-7(k?O$|X+I-=*c2=)?~_rW3mcgTv$hQob+~Z2c`@S4Gx67fH`l;|IZg6%SkPxUKL<>fltu)`u zVNRg3)6r@>81_8-_|5zWE=F^=_R^5@0B7|5Dvjv+xa&UwaMQh=q>C7_ZU7hxWtP6V~u|XmopThzrL2!v#N3BSPoR zeO2st%$6Sw4!(Y$+69{{s2>#ms#iBsFD#IkKg08s2YP>p9#9)#Q7C*pp7rfw$1UZ4 z4o=C0EB5gOCv7x3Rbv8zjK)FhTMN1$CzTCVvyJ#tWcZXkvvWNwm_oC|=oF=$nDO|` z*E@aD)i@Yp?D5veKP75-v1AsB>9AN75HoK~X*f{%8-2U8J`~D!D3tr2-nW*Re4;d{ zZ+I}qchpsy4u-k@Iw!GNQaul*|9Fg!4^M|Xm)`-^mdw)MCU3;?pfFJ1e7v4G-k2(U zdxYLghx*}z_03c!+iU|oY2z>H#Psf*R4w`zWYD*HbPZfmoQeWN{^5bWpV1c$O}Sui zVh`fG%psyHV+i|Ts8FG5%_y<8vullT;qa55uuE-IMoWQ_H{ue@)?PhmH7pE`rPfgY zVg6@qQ+n+Vc(|?>D6N(+Y;_^gPdM%Asm@ebwrDl#5oB(XCck7}_$iES*sU>XNp$A7oF-^4=bH%Zb&r6{9(VFgb>CEo z0h1}qr#%;_s-jn4XPAhBGj-;?VJT~tjn+I40oR#MJd@bzr|j-88KQ{+wcA8tvXVN8=JeY=3l?g&BT`dY>aIov3xD|)aX@)Aiq-Km!XnceDewf7K~NK?7^`7hNW;`e%76vOpo_B)(PGlHz_ zM@Vdis6`6gcr zY)ztnN<>xBCfdIx=Kr&DZ_Y}foZ2BKQz|I#Zo)4F|BB%K>m*4$mAUsS-ATDHQ)OZ* z@@b374{&E)g}v}gqlLX&k4cL%+hS#}UecfO@6DfSt-JW|dnCD!0z1RFooPj^)!~|bPAQMg&MqE{OQr-C6F2wE3tXF`v%JMZU3~ufpiwi zrSQtqaOLqk1@T4)8D;1cM&CE`Qx`O=e%&`|L$if$wfAkvVOT!IE<2$`k+PueKjjj8vHsLrHZQbc=+h8MYsQ5r8qUx*by;Br?z-=8}BCiE-Go?ARcbpP}s~d(qb=$y_~|q@pO}@ z>p32b#5GxUnCkU?ySIkC3*-0w+Mk}*4ede5F@Kht7&X1k(TkE0q;%F+_o6E$W@n{& zY~3;N!3|ay7NiQ@sqVS6T@UB^6J#~WcQuBh#UPMpi9Osb=z-l!l@`W57Sfun-=Akn z-gvU{un1SA82>Ws&n&bG^R)3!SMmgx=P&zE51o#5A{}43d7Z{wQ&`qn>2forjh)T` zoQuwIe2?Yt4~6`~x`u43qFI+e*I~789%O*o@M)j~m%)z=XSmwFGAki#nVDER7MD&3 z-oP7{;hylLaEh*oh!~C88i5^Ozqg{{eZ%N0-80Sj)bN2C%sP@Ws%Z4N#A*fxY6LhL zSBpC%7462Gr7`Fd?;i~`^zp<)$K<+Kr;A532)_#BHz>Wr^O1mC&veCB?n9)FVrKBN zBkl-941dOx?b}J9HfMdO^QCLkA)WAr1Ie@Nvml2zy)clht+Y1DJv|hz#gSAq$aNZW zEi5bEHx9~0=yCX@&qBqAs$%o(#rHZJM)l9#>3&3|(lMaguHrig#Wr#L5Lv$TqnY!e 
z$@XBZ=0d*_*tuac4tQ`h`T?Jcs*8c$e^$rJ7+I2utQKY!<{o7tSe0s0&&+rO-C%{Wc@BzZpP6#7alzb zuA)PofDU2Ai=pJor80|A=EQ5=%4sjvmsLuF=oL*vqU@LnH*O?P5y#8&hl_PS)p zN`tAMcu$sn5w^#YVg4*7){OU$m8qRlBU{_!;-L03 z&1Z2f>cvAJAd`MU?=zw0DVc=12*YjdF{T@d4j-eOvk=iASGpZyo9XN#tW?DzX_%&; zyBm&TF-o~2TDfFTQ%U4@w3Tux@h<&dz_#BcE*T5P&}r1E{kAJ$c(RdMe?ObDs-P`$ z)>+MQDA3aFN|=o@{e}5c%2J`k-%#lIHdoQpkS)gdNke4m5tnrEw2xlOjoX? zeRml2m3jv$N_-kb$igxv*+aOZFMvsVMui%g2Av!>pj%mHtG{1%>nmvn*X~F^9&3+p zUShnO;Cz*3G>dDBp$IYhTl^;6}J z^{rYT!LJ&RopP&`Gls0t#cFP*w0^eN9lQ#R{y||?!Znp>C#pN=>TSby0 z>E%hsl`$)dR~2+Wgwn7&gd0tnWL}Ww3a)r7E{z1YI}edBShkV*=Uw(In`a z7pF2*Rsm6MVb`uiE@s;(-mvZpz$^D4Vf0;WSOzjKhcBJDx^f{uY$AOQQW z0N%IBf^KEz1}2m;l$dr^pTL6Q&h@?BtO!DrL_he2!nJidO|*&8gUwoHLpm+~IaNrVTG{q_SXdwX>NGFS7ry#>@N|nb!QCYT3k1i)scV-s_)ujca(mQ5l+8qqA8kV@ zx`uzC)2_V9fNggQV=8E`gsMV$E!Wv}J5oU$->r22>lvhl)0$JKz#burkIyW+h}MtD zqiAm4e^GNV7I_Q_cZSjDi#;t`6$lkXU&|m&7EL(OSk#tJuZ^M_&`UN(B~?3o_ZeC= zol#WG)mx7Z<)6x>+&Spuwv>;BEjqxjh>nnX_#Z}DA?E$BSlqv{*tTRpdY)$+k!(Ab zhT+C;C`2-uHc2f8Hw~||5I){tXuVExPJgC%i(@~?D3JGMgR15*+U?N`GTIdf6W3uE z!zYTF)%izmX_?N!D%{NQ!V}iONtMn^zlcm1GTso0WL%CYx0i2<8U31xew{#Y-l@i} zMrYApG2Wi+Z4FQ{t7u8;T*=Pg)~V-Rtp~)lOVUY_GEOwVS;G3$a+{=~te*k~GE%3HSsiHZ@wS`HnHCLsL|q=}zNCsgm$*!wV&88w%pe_pjer43}MoNrY(kgSI!;#lJE}k5fn3N*(v|VcX zGgIs1LVgCzJZqcfT)-J{d@tc~+$kBD?2C0go zAOx4V&durT0`x!VLY0&0OoSY0H^w(U*ZfRb}SPM+K9>&xFvnio26%!=Bc4aYW< zF}Y*31FNVoQo^XS0XM8CtobiBNq2}1THLPA88E8!>68=_KbCQ*^F@LJZ{2Z>l--s3%r=o zp&Tu5p}vb4jpc7&>b>v?@-h^;Q}}3;@HBO2MCNV+tKP?X0F1?19WLNJ%T++Y?su{;=H!XG|cLuV*q`VKT=vOB-ok1)vDW^eK0B4~y2 zl}1qZ>1#OC9&Ck4y&0aawkcWLaKK zf^5nwdeU0at?kNkmoLi>FE-^O0W?w@!o57$H8rgRlpi|_j6ujO_7V$Y4%90cUY|jV zroq&9XncLZliA{g%^gA!XR|BdgsQ}|;gy(Hkms1>V2UMgbZTgF|GIsS(nZfuOPTPp)8dywOq-W{}o=5W$PKsdhWD_k4md@j1ez zGh49|_}moH&E-0?^dM+&FHRS8-Dx#@oRc%IXw%)5wFp*%h&*~GtMOz%kqIyQBl5&B zp3Oi14mz9bKf*d+W6)tmx43 z#;OyOb_HE}unMrxImeqjzam{{iHOB^3mikI(~3hFi#I{{%4O)3TI`_5lz zzFsP)0>g;p6CZXl>4=<@oFl&J$U*pRYD@XjOK~uYqfP6(w5^5;HH$!y-IJ=b0l9d~%Wg{b 
zV(Aq;sroKbV`HOxyaP<>VF*=I+~>-sBLaI{uAfF3vsX^USgskUHBjXGN_03BnyQQl zwiRB}yid>6B6%a&-#Z%#bQoN9PpU>}lrCi>w)~|CN2=J|&(RuG%};(o z@>`H~m7`2cFNELvmCXp5nKMGxu>SHi6>a!;t8RYXn`1bs6Ia31bX+OdSyP_RtVR8i z)Vq0Q)GjBH=E#U?>$I@r^|4Y(p_V|9^=jH<>ne5^aA|Z+GakRIyBcFrFMgp>>8Nr+ z^bsl)V;Ah_$Szdqlw-kqI)$}cO-I_6}*{c>{$sjFp5&RQ!J!d zy2+-#jOxhacZ=xe6kmBZRBs?>XY@yEQhD(6nw-I-* zOQ3>;R_gSXZl_L!&Aq3XedP~bcaYG&&uv>^H@2Vqer8Cc8z2k9ASx(IO*dR~o{bt0 z8XW1h${yzt7h0Rr`fMZFrLkGOIy4eL0={FDV%{;J2S_cWDBQgrB(7=FzU_{TEw z^4}GjIN!I*9TCl?w4%2Ss#bbskD%5uIsd5kO`{^|MtsGwg)ZX6MImle+w#0AXPdn@ znX#Zq;rOv~cP=wG-Myjf?Kus;AMuHQ$~E=ep$U7UyWvjo87u5?9CEi_35#HkXZ^;H z(iv>Ulo%0m>8k!?gChEx(wv;*2!c*F7HhpI97e_hSQAeFgIw$vWJG<>05ajEOx4s5 zWdS?l?hQ`Q_lh{Cn7I@*{HGBUKe(Sme#Y~AraXys%Pea53!r1~F-osfvl*pwspXn~qT9H?t*fWT+803WE?5W- z9}CG#?x_&~IgReyw(8;v@_T$c9}No&Vj_fDx$wT?#K!d5?Uy|!cwH9Q+ayHlx^ISA zrXEA|^KG7M^v;y47cTU9FggR;>GYi7>oWLsM$D1sT-l|ud`_of+aX0PmVNfCqR;Vh zq%nGV^+(UICl)E?)_`2S3Wxusm_p@YL>0s}k21@R?%}C&z%?>iYQ(?8#ao>|Q|Q_A zGNeb)!@9U&=4pYL+~ww#>a%FL4MUAc)t$yzWFymlhTiDo=7lGX{f`E9(8-C+A?|~E z?pNJp=0rMJTxzBwq}C?I5})JU-n8IYixwYVw-)-UunxqiC`N&ghan=F5Ry`>yLBwQ5Cg7l&_2ZmSl?7Te zIw(_1N>Iu0FQ)8Dze}C54_MWn3u;pKw4SgiWA%!*P8^U&Y(62K5Tb``((bG<27EeL z)byLoU)G614=M_6w2`)@dHXvthw?4m1S-lEBOW8PKc#)D66v3q*rF%=_zP4u!N4xv zD1yoqPn8%)$Khpn?6B^fr&l>X@C6-~cNJNfA*T-h*!ARPW0wD4Ob)@{mTirfqx-ICx$X>Vc6# zS0k3mEY!8Sc6!c6Ww|Cu561S{N)rZRgqUE&$ws6;E#~XQnwqCqb0R&uvU@Ls!U-6n z3vLiDqJx^)3|S$xUh!ethV1brkJ68*$77Tk4#wep?J9QQ~?ehnizkkGLI{MaA_?KTdh|`l<5>{ zx+jth-*+O1r9*$z?}J7aJ!8-pD4~&{_-sc)PMa)tzU*|@o%Zc)yyi!rvxU5z93P(y z)VI$RmF##{FEG)$ER&^6U?ckK0FK_{{*1hm z(%^A?tczE3c@D9rr}ogI@-+J5`=st9?mk+%Y$Z0ibY}O`>iOI3D#g$t!kgjoFrPqa zwEmYghY!5LlV-SWMCYqfhWj~KltW~Zd5o)tWs#fV<8fD*^H9#GN$5UKfjLA9&(tx{ z;k{6CSU_AhOHB*Xo|zc6diIdVZ% zoikWy6e3)V5w`-y@2`}VsNUyG$80~Ee<>>_H2H)%tdS(ile&C3`97z;bfEa4kx$)? zYsa0&PfQWhMQKD#bL8mduIo?{2r+tJG zt02fWirF*9I%iece#eZ9?3JwfwJml32)!kuCj%vxvebuPwqkI&)5?zh?DZ2{+D?_h z0~gw|kz<0q)?xnTyAh22#+8mUNy9@42F(`4>2)LNTaCeJSJsj7j)7PDa74f7>wJ>? 
zzdilZ+$=Pnwd!_<^IJLa_!xJMAR<>Q?sfw2qsnwp%%(Q-F%VA z(oIxid!hZh=63dU#}&cVcilG|nfS5*9lfx%-FDyZd;Qjhfe`H5--dK5{X+3uUP z?fZ(a%9C>w?61Pz8D5F({G!lFLUhDtG&!(aYG)0BQtFwE@x?uTLntZZ{M0v%CH-!n zdUAWr>E|hl4qK3_2?IloQCIPujGUXg&q;NB!H7^sExFf3>w=H5fSq^pYF^3-u*W*jW&jF8X=`*DYs zR&ZEMvq#MDwqo@TQ(lP5$&hqj;&WtCRx@{6Ec@+Fty~*1x`9ZIgGpl?S1{m-)BHA0 z{yf}5^*f-5lLq+2hQj^26`C?hJDEC?Sl1hgVsJ?VYr0)ja;eAYKm|=zq8ZHp2 z)!zY7P5sq0JMHLwo6F}Z?sfRK)wGMOLE(-5v!M4*i?}xm+WXzxm_Emcds1}n_6hSk zCxdE6_7kPqZRuS#wxD6_y?NJ~ z)wA=n+=Bd#?JHy=KK_rGyb$VInEe$g6D6@AKeVX6P96wU`hyOsbc*$jwf`J>9X9>RzlF z1i!gTZKB%sto|_d&UVhY8~od+&@z$D6XLX(HiLIg)sr#^h`9(xy#8mz9f*;oAnV0a z$buGI;K@zdb~y3d;>%w4TvA!WPP>Vl+%K@;15Kak-Wdxfx^n8FO_vgt&Y)R*0>Qfd zSii_3<}GEz8-Ef#bcoFH*1>Jt%xN;_0qde|xdk!JOXecCF@m=CAQqvv#MfRKkZ9|z z$N}E5=B1L@^Fa^n$5}BLR^E2q+bwQKl?SUQ)!b|{+zfkfV&k>^SD~E0y>QPE; zLTd>m@Ho;!aur>Al6KLb$_Q2a9rWsF6~_FVh}{7#?A&iBPqs6VmJ!wBl|q%`OC$=y z?AftZk@S74^hX2D%%jnIYE~{db~z)x=UykQ0;6Bl*^>G;Xq+55oNNOrXC!R3Af6Vh z?h~^Evu6$#0s=gLQQUXdVDn>y*+UyUhq0kk;(!*knaZld^$ZrVpW1oGmYTg%T^tVg+H|{*b$?U}jO_z2Pvn%tCH}$Dv z|B6i5!9~sf`X2CA7Rp@!NMu-RX-8dMSv&*#b@38Xp$}v{&8XP&D9Vn9q;G)gwn?F7 z&V|kB&W|@RRO&vkvz@wFKqP%! 
zMfZ65BlEXB(6#9v8~tyih-6#-eL$;HLuZDni8|E&Mh5=}B;g-p{r7i$MB1J@*kS+k zFaDi_YK05lpv@SR@lW^3fB9?V5C&tro`|1#YFfCkx>JqQyVL4hX#e&(R_*M$3BfZ&B0gn%E!66#$+hED%WXkw)r(HH!R4%zLM0 zc?UShUmnNE1g%F=25&JlT*azL+LT67=LUo zQ>1@Q2&hFMEgX;~15kzf0stl{Ap8RR1%Pbt4J6@KEKW1Z)GPZUE38Gu!Y2>7)*P*no&Nus?ba4p>pZ z&n-m0HwckY+z#B|OI*&|hf)6Jg2>e*EKi~HVk^(!^J*=)=`R3LaMK?XT>qkJ7*L>= z90RbQPXJ0>V{f5U;5p#9yxl8H;IcW78wV(u|FIZ>m!N|gq#t;kPVT^t0yv}@W1U`q zYZ$<64#9vm_pv@2+*V94`cK3wB=dR>`Bggyu}frDeP*;qnHYGLdAGcjM%VY?S^pk$ zHElt(o@w>?2;h=d7Nk}Z2tcFp1}FyxUTg42Opqh5hV*~EH?W*(K16#JCuXkk`4{-V zs?g3tF>hmv4E&ksC0LTyP+OsD)xB?Q;Gq0p*EbR>)QAsvw3X!?{@}<|BsR^@u?-Z5AtGEIH+W5 z+)ykMs2U}4##dDVE0(|B+hb+$8TsUUH_s$urFVgj7-?grKMrKl?L%w@`~z>{w#b${ zB!UsI4Io|`-XGMBSzQu+2VmlXstHeDP;giRNHmH5g%bql8NdVHUIH*34O@<7Fx+m_ zGbM02!Q!k8;48dA5ifYQoL-3U^!y!w6an$Kxl?JB>KSp0|DA1;BY0lS73)hRB?u@Q z@6y34M$Zdx71|4h62z~-BiAlf;rQ3Z7?uHsc9i=9`tgZRp^7vjjp6s6%E*n>I*yL) z+xyQvm}|ZR3eMp3{ZD-qWoo{y(I)mk zfR^q0vx3YGl5Xq{h+9UXZGM-#GPil$N;k2&NMxNYIAdtbDTMx~L@$hGva6G40_5fG z?5}~5+Y2D%vw!~sJ`u?84DP1be?p-kGV{jBeDIJeyP)DR6AM0&V(c;F4}`$H+rL_+4PfG!R8;0pxu* zTYyR^4o(S&)s_h*k2{f;bpo%CIN5a%8qzoD`LZ5I)MWTJp=H$asWU5|&zoZbHs)(5T68{b|S`hc{h0x;Wi0N!$lp*S~} zvo&{X+g{9Kd%-*;`OTCU#8RDr=?n_XAkU^QYlj_c!f%UbM*xtUr~}(V@>@WSvyX9! 
z7D?+JpbtEJVCI^I+Hw2)hqp2DJ)Z2>?SMh(k-7`waK7X_mUv~*JtfjK&2cg46L>DY zkuJBp;{6@K{Zz^{Z`of0kFdAwHSGzHH?6#j)->_E@JWNe`>71x5T}pqCYV<*x(_%1 z^%0yR*{iEa6o%Mlbjsc_?nbOd5IUF-wjTgO=mM|~EdFy=9daDOps7QP9Rx_-Hv2Gn zk+N0Fj^ikx-ybn5ba`?L>UVzN#@ynykn}IHp8yxp(QXIkLf{Upxb2HYU=(`{G9*Gu zd`KN@Rh!P(cd{!dZUgsXwBy-7sWWdL*IWMP>aXMYWZx1Qv;^v<*Izz$WD4MbBVu;x zlelE>>4iVo8S{XuUX@r~Uf@4}`*B%{Gu)F$a9-o%rqRqaX*~*CCgNmT7eWVqOF-5~mszkP}x2^bof&;4d<+rgkj!R{`Qa|8QTFh+*b^{AlEj8L5PEhTJ#9!ZxC}Wh`1;5uj#adGK7%KEUO=j$jus%^ zcL6BF60ZG40naq=k}wbXyv?rPyjgb7x87(4%rUo*KOa0$-BXcpi<|k^P4eH?*~iMA zLeOfHFzw~(25o9&tnZY%SR^&_ivT}jE^6i{5%UHTK4<(^;5kG2;$>;>?TpE- zS~mr^cZ^QMQR;P$09tNEKfnIhl~u6uqWr7IeriCRq^Q>k z1{0BUj4^L({N`kE-4Ec8_04*N(p6ix_R3Ru`SyJ!_EQ3W6 zoK)oh4Q^Vm==aZt5H0#?Ea(|PSxZ7>Yq=el%RgEGRdH+zk|BxN--H14Kf57_->&l#0Kb1^|SzHJl8 z*mwUr`rtmnxo&^?XGL?*!K@T7#hj&Hxxd9T(?GBbKF-`ItM`Rvt5EfJ~CouLUg9DkyKy;9YkuQc17C6ZB~(Dl0;91VaJPXXXGf zWmAu%x3l(3`;L$&4ilXXN0AZU%#T#p%v_CRXS{i&=kKzR|A^G{{CBh?PWrTDs7n3Y ziqm*i|K)%CUxQ)A1}YMTLzOZN%M#AoZ+dj>@_z@-{|w|^L1_-qfQ`Nb{aoAKIUI>E zUgtR^It4kwo$~j5@)(*ZaUCwLvaZr|+9O@(V6r*`GezfuZ-?8~!tZt(ykw$wkhX@?$rZkzvKhvO z*jXw%cFXKQQ>-ohdD+uRp1b{JqKATc{t|stJ2Z;Ak;!hu*MUr80RC1F?|_8Fj*TvP zS8`X3(?&pu#*=8hR`9M8D>t9qR1d=Q9&yG} zH{q?3A||$l9WLJWAEL|Q@PB)ZKo*`z15ypo`^2X+VV$#FmSY(BND8Y1?+9nlr|Y=} zB*PLfBKj%VaP3$pM5G=b^o)MPju2v?31PO`Vbyd9`SM{k%9Depyqf(kACu<}Bf)2D zZbDDkPYJg>d5L)=dvQp>qy`4*AdSoGuI8ej+@B|dzW!SM;z>8nHGYtqlnb0gJ`TeO z)IO%0NMIId7ctsvS4$u){(Tt#yOMD>OC;Otn3eb4nO$C*Nfqk?9XH{xf13(%Y@}IB-!Bw1c0Ey!vSFWam`x30iw3sa~)>r#T>hf0vAr+pvv1D!K~a?jmO6bv$&C8Te~ zBQG@<{zhZ&2780&qCwmDEw3v1o^k3}2P}O0X>SpQK13)*Of!0HXLAYixhTV7!rIK| z!$?51fFy!7JGZ9T#rLd-;SayI=hZjM+WN_3N**lbc@M37N z{Re0xtD9)sVo~RU7pT75vIdK=nROI7|2-*yWfzcP5x?-wn4p>Z54FfnA68IIMeZ-D z(^2FZA2#J&fV{>7m~AL=%lulH(EXMV1-WXriv;~2QGgaD92G9;!bF&WJcB^`(yF2p z!8O}Sc1I&*8H?7Vb0dK-c@u-rzYA=kZ&!Wr zUb!im-Oto~TKZSfvw0IW*wayvSD-;#;=>dS*QQ(FKxF^s=JQ`+{2xVyN%_wGEgla^ zp{hHJ7XkEYReDFsGJh_8S6q7ln+w1hb9N_4Y|dg=y{2WND(9AL|g+oXvfuZk$d 
zilp!#);V&dom(aMbsq3qZ?TL9VulJ5>^HoI4N*EQk#{+2dI!uOpMt_H zYu-QYo$2rGKXz(&h|&ZIqG!1;z;d?d@Tik3tAi>m?-NW1h8yj&!FD!+r6L)4?~4J>xi)3R z7AQ495KC5B%e3_JsGh(LCiuMro*7{q?ddxuq6#}W%?j4Aa?^XW{YEGQX9}Yj5 zr&f-X(A)Rsx(8{ADN)sW*iD^h@{VD+)>WuMnQC_WG0R}=n8b! zECN(Vz+;YPE4IE5_pr}U%vS$TxKJGE9Key1!=9PVBu7P5h?N5xYO3(>if!%*9ypLc z0B``#VgU(;h2#|zR6G!|!NoE8ILhoQKQc&U%dxNq1eB_MACG>yKjeGxe?0-#v+E~* zKhOfRvIQ?AcoDoPjsqRW>1n3bG7WmY;W8rRA1so`Z~%}4Dmbf-zig^!mL!OokY*uO zgvPT8p5((=RIg~hF>lm`vVBD`aMY-urGT4`N^}e94g85a65^U3@R0h z4RIDi8`XRS%m&`glA)D}9xjoY@1Xd9E~+~-0le8oj&`p1yjsL=ua37qOuu>KZ$7|P z2cMr(%zikG>0_7)nv)&IUuiw!w!n+1;CRcY&l+V8(H4OnA}jvWoF8boO=houU`YMY zqh@!iY(ob)-jsC$?a2&1FunUVHHsHGbnx#}2~g1&RykXbiWf#FNL~bFlyV18QePn| zEKrTB6Q0Ba64Y1hfxVv~dC)o!ogXiNwO!>k%}?)!6=~jHw%k$;-~ZU`3i_#=#oOz} z+l!prrMM>viVrLk5`iW3MlYT+7ovI7n?DEP_?}NaaG~+}^3he8IUMw^Tp9C;9l)Qp zZ?i=&A^bvglv7XMQ$hXh2IwN5ND6W`AGGs#5RNJud_{OJ+pkkny}=yhC)g-Ml>T`4 zAiisEIhS)gCWKq(4~T^>$5Ow#*9y%x(~PZ+<9N}OAaXG$*gOtUt=DVddal4Sq>zka zjfM59h;-}CI;n#|0{C-Y8ivikVS!Kna4MSC0N(v-Y6Ggp`GWWVu%_y6LbK;iqAEDRvp+P_b=(w+?@AF zNv9w2$L79lTkx*&~>GZ3<*YOrI{}SHlVJ_7+eYaKN_seo>#$g2}Jw zV7lUyy^AnGav?Q0+TB6VX7aHako0-;5NMChqOPecSOyUq1lw>hJ-#P7nrycbNW~jJ z>?3DOI~T9=H*^yh%=X;u-A&nmIOrql!^bXNy`pDMMKOfg2%LI=s-wSJyz`BX_SbL2 zcSw30Sl9G_30!m}>FGsh_+@s}2VT8{a*$5pC}jfAk4-tc-#}2|3R+M&A4$t&N)drh zo%!$5hvvTN4mKls7EczMsja3E#&1_xu@a7aaPfmY`%ROP>J1rVmcM0F=n!hArsPnO z)dDz<4YIFK_bb0Ng0(t4l9s1qx)Gh4KM}_wog?u9VkwTU(~ksw>Bhk}PAfzNioT0n zk-;bhpSGK}oa58;3;?Bq22F0~cMgCW&r{Rp zyg44ga1mQX2UV>)nSC z`9Lm(Z@styx8Q75y@Mdr0yIE${XMkN~1f=7~m$S=L;StAD!L2VZ zTyad;H<~mW0W>)grOa<;+B>!5n!uD^_~wKQ6j1G;DU7$6I*-s2cBuy&08d-`xr7w1 zS#S0AnUu@$PFu~vDyGNS+!OW$y(>mK`{Sr}2 zFktV1u~Pa?FRFT=ok3Ht&+nF1W^SKd2Q!l|V~NyJhu5G#>@^q04Az(guTw@g=vaPL z!rK8b86ZWCD9d_TgPip`<(n)z-tc0CHY9_xCM_oaz^bxln;Pj`4h^^C_QLttM&aOd+{NCf+JTpPI zRW)M_OKO44dnn~dnn-7Tr9W9!awByEx3nN0kw;(>A?#E}c4%G8Zx_;lFkFOa@%aE~ z1wLIbeseaRIiSWNl+_17`ojXcYs`R@-=W>d*}uf+Rueyy1QKCPL%%dut}xu)M_`X& 
zm&%-YR66T7ZeL?VG*A4+(+^&GM0VaB#Ga|6=}0N~s6z;NIwz^OyJ_)yF9O*YxJN2F zmZW@N&P_8tEqgS2nwh00`MKeUO?}mkPYj-ulGyllls!pqn6(W z==9Kh+VBW`N|o%jwPszYNY|xcqXqLq)~mDW2+3q^S#!wkRF5RtWG;o2ExhQC08N7f!loiE6v=s>{wTPbTxc0$7E{$uX%3uJ*MubhyMKi6B~+oaA_`u>E!Vbzy~U zmBt{#lKR0||C0h5ErZc~?GP;vNy^u*ZRRO*t`v3+MiX(xK`Im>37{Lo=HliK-yl+= zZj1Srz9in2-FtgTcN-|0C<&^;h013fX(6o6AmMI&0K*Tb@1M^T`Io<=kv|@RQwarq zjfOrl)wIalu+y<`b|FD#pbmX{}D=e+QWlw}&c$aYb3V?HS`cSM`0lGDRP}W#q zC6-KR&GxNwD9xBZyz%M%Ahy|Jv^1WB({lRWnda5W==8~t^iRv68+eW~3_MV5EKNS#FpaSu#>J~Z{_4(1L||uN zIAhI5YQ18^{gU#+!iiTzUWnl{uyanrI!d##fj z0-B$6m}+i~{)EM5vP5F{DHwy1d4=?uP*^W@pWt5hpIwZmTaL@bqs>kvBQ^}WBX%$W z;3l~bY1}Gsc&#vkatC3Q_{hRN0AD?#mfOrPEx`fFrt(^O^38 znM0VPd>RwU=)(Y*Anwz{4dXKGhvmw~^2GVjU!nPRTg4f~uc;78l}2O6{cOs$!GBs*38AB9*AnyPyyB zp%1L-*XgA0jmR-&A8U=9=C_2ye_2Vhw>LIZ#Y&JLD8813e{o+0W1Fp z#3XUr2yZ6yHaQvgpg;8V0lR#&3jQ0=ND39}DzG_@T|U0}1(pOB9K*_?qNdM3ecNQ} z6X|Y|iS57RDXbl6_N(!G*CP_A$XE<_y~oNKczHPw8$h5&OIcNaZU6qHoK%%b9*>pb z9pq>%S%I~Km6CI@9BYSYq2=ENJg89x5EH$|b2wNN4rM~-$9GSUd#BAWOD;W%a~h6S zgKEw$NA|pP4o2?mpZ9U?vsB2^ZezNidLVAVLw)&@R@7KIbt0cO7Gr)I+sBGXsKx(f zIvDXf0xAQo4onLg3&)Ju>rfL@#h^5|b!zVJVZI=L?vV593~LE}i{!~VKr`u6>7QDl zl5-y`yD~868tnkR=^RZe$+-fzFHudi+>G1MMf zALhQ^4P$JsRQ)<3tky#n0cSNi8nuRX@miJi5>PFXf~@FZOuZy)k(Sh!t2H7jzK;!y zh%`>E3#n9Kld;3%x~mmE6eq?AoxVJb4T!eyVn~67D0RQl8*G_k2$PO}(cyOnmQ876 zfL@LB0oGQT&+{k(OO1yO)z2)@qHV8pK+mFM)pGXfBSV}H15v}+>(umhHfR41@0m&=n{=54&aKPTgR5YcyT&MNBf)KuMA6 zL?>8J$PO(NLX%G}5MYSXGl7KL%>N)g68&wPDNA_O$tp=C%Az){XeW@qPCc3uz|LvR z{E0?OoqhamLhOAo>@)%#QR-lQq*`|EssTO{0%+|yoiwA5E{GKM2D9lxFbuL ztB!|{S9skiEu$3BlLKfP}J=BR5P_YyU zz7-H-#&H=+@IB~kj#_=O;UL9*v09kUCQCeKQf7-PO{vl(u20}s4_#H5t7rf;2t|eb zcop6hVnZJ?mHdpL^tKoD8Qm5l5-Ei|);Ix4QToshjJpE3zlE){U%HV!N6mnnP2|v1 z#^;vhT0uCEgmRhFzdbUOxC_TrcS^K2=KXAk{-%Nx2c^KK&TPz;>=o+}#3EcbGmw1^ zY~U2Y*NKqbmY>C@EBt`?!CCJbo1)9Q;*DQ~(n=NGNE^snk$;m5ZQXw zFVV3yrjNWxLVqrn{&-DUMZ$p-+1@3uTlZVM(}3)%CRpxcXDrlKpE@zaYkZH?K@80L zD0#~vF>u9fJPa<#U3EWY3EaCbjJn5Oyo2cjVmE0`G(v{iMOItqrNy{<4SC_&roIUQ 
zU(rJe_Qbo0)0YUaB^N5Qo(2?k0%qe5s2^MNAuN68wm%#_UPyROU zPuoXisKvYKlmN0)ltbcTfmrG}FNBP8T2?8W*62lHnTQ90B!nDeZh_hvCHm8Zk4`zI z=!^(i_g)0~dF8uhQeuMQ_%$`*?1mmR7H>Aaiz7OnWMb*_gVaY}pEZX%Uaac(kI-w| zP4T_evqmpP%NXYs&u%{M(bq2i5MKHSRXR&%=l&gnpDbG={C(ox!Gi-;IhGKPwh`V+ zB`aAcK6tKVVW=W?XlOF?7+)#I?1tm&9K=8F9UJvdWTG__ng~k5Xpnx(f#j8)%NGkC@#SuyQ_?uV`}iz8$=)lQErp zRQXKi-G-w}$8uRjL)xUS+dD^0KOZb+s?*3Or@i7JRG6AP2Pgyd)Y#J$`) zWot7P%-_tVFa&CCS#d_&={>WH?~&<}TxKs{fe8oBG{g?(lzEKE>#qTHDmA_;ah@ND2U5(%ARErwYhbC~Y%d~y6O^lC)n`2ANO zK|k%c5rjh3C~k30XdP0;nRZD0)y1AEc9$;JW3Lqq9ZiFN28xEkW$>#m9n!1kn?HAj zXH9drTpbZAjm$vnaHG_Ub0Nfe^`XY9`3xOI2JlyVWUL0v6rMP(Z=g6CHbUvFQRq+h zY{z#KJa6GtYXdU@gRSR!YqFiz2B@4WE2eDKVAd4k{MpF*94*&RE{$Mpb?C%TKy#{=@beI~ew>0=G=Dfv|8juC z&_~&-Fe?RT-|{iFK{@Wj29WWm9j7qI71Pk@NS2P6ISX#}DuTNU5)0&-+??rP2(vsGQjWUkC!xli+z$3zN!ERD6g!j@pUp`)d zJ5*#$pJ?SM-zQL7me$#2x2(=))YHCH`Jk(*Ru1=n;2akC%$ghRqitHlQgHqu;4RU; z3i0^WnysS!_G(HEbz8*teLlw8?^w#>f&$1mZbxMP4#UC;-Ap(nN|*6=wZ>{!RRFIy zBrhd_dTFISFVR-^0pRc%=zMBM(s)f5)!2QM8W)YLUC+Fl;vQZse`R26tV?-98t*mt zm9ohf6Oj%_+Susz#AtaK)hr=b>NwK3v6fO0+)K&)Ot9lwQ|L z#L7kAfBC^iWw8m63#wnUH^nKH(0oteA7DRN86U`4I4O>K;-1sMoA$RG=MCnP)Wfbk zj}SMG{`E9IgoV9o)#fX1YSa^m0W^xN2>m)JuEE!~!9gQ^G)Y8tcuXUt*_=;`Tnj#} ztiMCy_2uX4|G?Zg|?eX=U|!8RVn?#w9Y3*y``iQ<$8m1>t&30 zi3on0&P*ymDXK^D_{opWV$?Uhcks`;#y)5$l|Y z3Mqd6f(RXky_UM$>ypTv#2BHIO0UI|zCB-i*GA|V&>7?2O(kPafI!YHAT*=d-^rQZ zk5Ye^1Ya)<2d|2_DPw6PTkEq~>(I*j+-~udG$*Ng^+`%ybJ31MsjI`EME; zEtBZ2!vsG+XJ2W1v!^Id+7H&oKg{ZvJJPzl95)rG$#GqjKHFS5FXJdOokCMJKn26m zS#S#z=ynLvk~8+yD{Xh1sz!H6C*i-Yw}CIK2Ha~0aP!-WR()mp{CXqE$H><>9VE#d z{a!~&yjwD}nm*+7bEOxMQ$Y>hc+T_|>hIOJ9(Bs=Z*EBiH=Kleb<dOX~Hc5Dde|i~>Vraidxt8f%3js|J4`9Tj7M zzGq7(W+4>xJ-EmxsdEvT+ddMuK|ZHHqlOH=Bw9Ut6iUhVJbFgPCX6Z37@#D32L`DnF#lFN1!TkOaxQiH(LT< zIQl@Vy4Uq&-PgO4Q?4l`s$Tc}h%Cw?iC)8|e1g+&k7hiD_~saqgxbH#6+M7C{sshY z(V=|p@G7Q^v~U8&kB*$#@rhMn$0zOGuQ_EIf+dayv7t{CgMx9BQ0U&@(Gf4t_$*Yl$gy1^)kE=3(oQ# zEIjBAFPzpBZX-_X5*6!C$u=|&?d^U1l;harY!-#_0nJ16NoQVil6;f`pB^ik#Wg*le~FsH|o&bO>_P5PRviezmB 
zd|>dsXlz@2RMZ;fgN8H>7!JTvRdi9O@RuP}wl#TsIto^MQWs0b5t*$$)NZ(gQ}Uv!A2<$t^vwni%~%G5P34&<1i zE!K)EQw~k55|^dKT9NTg(llvNcL&gkW|XpPSH81_cmxr(l%O+1nZChMnJ31*L9h-c z^o*SR=w|q1Gb%93bmM%5?!kzR*u1pCQBCe305PN|O*jECt&^>1c*N^Zq_d`}@xfkT zb=H1E21X>Gh2^rKF%bV>t??$fV{8aDFX3TQTcCucwR7JIxiihI@e?c!V(ryt%Y?kV z>Tk7n22)R`pX`)jNNLj9uKnywa(#+Rwy$5rxm7R{sW-vWXa?Lbck85p2hMatgz`Hs z1g*8)o+v=ikCk<2AX=Hmp*g`aN3VB4n4KHac8^DVH0(SX`Gs4Hz8dvJ_Y==)vC=2Ql36eT zvm)m--s@w{X$GV5WT2q`mM~15XGKiaZhtrIEc5|&HrL?i#=pRkKh-2-Fkl;~9XwrQF6a50~{W{7AKjwHgOz%g?^Q$oYI*M|aXE8Yo< z4sCoJpNKrCtlX(mY3qMvOr7pFXJ};EA5No3i6m5IF?IU&*O!kClXfhC)^bS5aA&(W zlc)a^dl%$Ir82W#Yz7hJXBC;9v8zY4hCb5{M$+XNxv43eOANKoh3ds`7V^Vep`KH z@fS-IW#u1KCAjYgWUlF#6;Yc0jgZxPrC?skeS7@Jytb zXY4Pyt&pYp!oD&1lChYp|CE2`sLJQY_++_rZl{~dSzd$wiEfWN_b1qmUMD?~19uY3 zdL(2!>E{bRV%^T2D6Ao-kU`EeRXwo>A0e7#xvLx@6d|LkYp&k7u$a=P-Y1-t<`vQ?Uwy(T&Y2)id(&w4ShZDza*t=W}l<>4zt@>|cmo zDOfur%bNEylQ|c$$i)_BxGeJ#tGa#Kt-OaMfSgB2^QI|#mGmfvP;pHpbg;n(XH;-W zk!W1D=6xGULaj{75Ji5U5D7LN%MI9YU9-S!5JTEz(BQ?+^Kngh&O;)-NSsu0js>lJIWHrRdQ_FIo zAQ}ZNaZOmRtyBLw5P}Wr{k?cW^q6pR)Y{HW?#Mp%|NfP}B<5p6YA?D!u!q@R{k0|?Fd$b)MC-RoLu{D=hfsFN`@DOG3g=e% zf9RLe5ISH8LF~W#umY)CX zFM)x+hxO<{`G5Nr6nqFV*Z+M8@QVK5o%rw9?*Hz@zmC)Y-HHF--ifqnR7n>&)_|mQ zhk)SAm0|yj*>sKerqut4vdCChQLPmibbbeBN{xSAjQLMYzuo42=dUXZ@i!zXrayB%Kh9-#wYFO!wT zmzTB*K^pEoV~(_Ie?)>*T7E?ShQ zHg7igy!+>;2l{dK?Z1+7Jdrusg~21Umt6wi-=C_#y1n_A7eHhxSJm8+*L+i}htQ4O z*T-O?E?MTMR#}2;W}f0yDd+}Wzy{n1u?cnX&`gri?Vz)VyJp|Svsdo#Jg9$Rh0@YQ zz&t=iE6&*WJ!libc_8eNXH;|E^rONE=TOR+Wir)`);bF= zLLk{2nGSd`(8LiwjM?X3iCB-c|5jT5``sjb;pJK$h!_{f6P?e}c-@jjtj&b5eGbuLy=3Z|QEiS}ksR7_wP2y^r<*IXiLj=eLXpr<*;5D!(`XXD0Hm zkFSgbGWx?ih?kR*@x`zCUkjewi?p9N3vT@vgMUd>w!R{O`zrTbg?sw!->KQV?pX}K zdAWH|ws^Y~)kBy1l)DB#wm-A6`+e~RNqk|+A09X={SWtle|+yBHyBez)t6)t>X& zPU$fG!SS8|ABhpkaVnM_F5S(smXyyz53R=(cUI%8U%S7Uma|^mLqFXzp6%|LkY zp?-|Rs>QYxt|Ij-RSs2N`X66ND8xM9@DSg~oyJVNIX~`%!RZ2ybGS2tRDKFwZ@r}* z*Q~qv9IW0+Ep)aLpZa98K~w70z8rzb<~27@YbKMKYNtEzA3r+L$lJ7^SP*&?$Os%Y 
z4L^owJ>>R@`}x}eZoi|$9#-z8>7^h(t&640eTnaY$8)~%)Yt!JzEEZJH|$S0!Wv7t z6V4kS``4|2wGKO~N>+8qMsYTdDqc(EVUe&z3twy^K#iN1dwzR#u}!!4t|pB)zHkSm z*P30R{!01d_5Wx=<9e@Y90Q;h8eE#crSZWbiM6_J-_Lc1dey_3fm($`L|KDUoH9UPS z!0?D0L3kclmtz3tOl=57E>9^x1O9l5ng7L_7?53qRTdXORLNN+`RX^qycHo@$G%?y z{id&Hpi0~89*uCesOq7Oa`|p#mj$dNFA;1rr*POSm@&fQ^CU;rXyU_d&!Xq~&L#f1 z$1e1|wL|LMe#dQC+{WW#Z`L`C?p3eQiqvtL#%%d_S~&v$-z#R_ujZFhv1n}J4PoLH z)QHFQA79!ns7#iV&a`>FU6_;I`hLKBT{g^!P|;}z8B@9Y4D_D}N- zK#iwzZmyE%cRb?-JEVI+FdEtZ*pgiseLI*K@NNt!hx&jz{VnPbLN3?a>t9~&w6E*G zYu@CA06Jzv+%G-hgs<`e(q>Mn5Ze+>nv$OrOHDaq(YH^3l7!_n-o{BR`nyHvov#Mh zF($TM?n!xkO#e#p3rFXmu7&6I+<#v3Uw44e-X%j=#|Z<~RfDtUwDEM(1mQaX1jzg1 zM(s>xk3V>r3YN0`Hs6{4vd!_-ou?74S*to8)b$rri_+s1I!;i5(utXF#cmg6+s zt}jV%v`T#{>Y3fR627RuGxcd>i*YF3qMNeDh`X*ma{F{~v%s0v*!+f=he}=YkJkbY zH^$raR)BsU10iV#$tRI{&j>0LaVZ1V7`8kzwqy#&&-VyJ^c|s52Fl!U@EZ3tJ4^Zf z2*d!CRc|Ry&k@SXX-@p{2g9CO#a7>k5^ucH8#^$}w00!M;U!rfwy6)ESlWO^dj1nt zN&!c&!b?Rxc0@HWaI`up6rG3&k^lL{n7g(56YCpMYcX3ta!!{#sHQ`6Jx+P>^?`sq2r~S zmB~z1N2gPk*wstRo|vrj@9{PYQa4V^mQmb-R})LXRHiaxc~8teXUlH)`0N~OzRCT3 zEj5OO#WPJf?+y`c!4yw_*37M3@t?Q;y`nV4wmi>?m9i=Zv2u9+%S*41!{or!;O1gu z5nT9QFpLhe{u=qtLECRffNKfeQSYpl&$<9M-9!`T7Te)LVT6LhzEcf3R9GRq~_Z<359XgkdhcGp%}la0H!uD|49LfJUVe?y1v`WsMtS(F?$V4 z)rz9m{;}m|hEW!su1rs5Vr2(}v(~Gu^2bQS;>XdjG|ps7#xcqd4eR&L+^geiX>jbT%L&Fe zBp%$`1l&0guolj$C=a~_R=oA}G)+FZHqjzjKO!=h2*F&7ijPpdGLUrsTDb$PvJx1PTRBSUN{ISj0_?{F>xF z&7sBi8F>CYO(ytVxxZoO11ZZ>z(w8`I8=bsX2}}G_}gxN^;?adR*t5oM*E2W6iK;Qcj>=4Sn8Ng{+i&lO*9)`pqBcyIO}r{_hP_k zP0vY0LYgH+8Ddj?bFd-hG39rRhk?7Pr9s3gb#1Y%ujjQnQ+lVo@P;>!(Kc=B#m&(! 
zxjbw9nT*`0KOES=bqM{-oOOti(iT(DJ%OI^R}LdQStt<3x8&iYmP8_Od>tr)hAvNZ zTt9VZFy}rDxHh zWlaDJd---0{4@BOh~8^DYr#nE~FJbxGk$6?cunE21sS*PU;^dV<4E9^L@d!UtWZ%zY=SE3!@_i{@)F2>MGG!BdHQ(G;bZ^T+H=6o;-&wj!>kd(#BGgyC*eppgepZ^{)Cfx^8wMIu`y1Hc4^S^GwGm?^ zuoZ{y^%Z*PQt()I0tpue%jYI3e0TMfADy=25=K)*PloYT>`pdE{r?gi?NujpK7gSy zkB`NpNkD_xIhb0?KQwZbBQ3kNW3eKnb|smvkxFecd+`3Aw&s0cGc%|axYvLGV*&(xh5H&^)Nu;RQpvYVqcri>3OWb;C%u%*h$2xJ%88lDTd;ziq%qwYT%+Pz_D!70WRkJ<$7{AR`TNX`{MOw zNzQaVYuy!%n8#0XJQFpK{c*42BC_n4ffpMLqOeu;l#)N3!U-;9SenP|$#Wx@#b=qa zmFCL`Ww~ize##COpZCEJ>tZ+E5%AQ7oQP_AAyH7QdzUgeDHQ^gEBWS-J_Wi7-G>|2 z#eAMwI;pi#AOW8TjFC`n`|$5iyl7tDnCqMa*B@U<@7ung>HZwwEyQ3O*|M2$#>Z+B z=<-=v%4t(i*^COuHg;GibJOg2CBz+ewnkkLbEt~-KFK5fH&2FEN;efdt>3=}h=z!- z;HKuYw-t9s%ui`9;H+sk)#=qS*?u z9mNTB*_dxZEzLR^(4dRIEz%uP7{*l?l}5o}%SCPFt$q&+?k@ne+f3=*da#Tn_TZ>A z!2&-}#sFo~tOS#1K||;X(i#m|L&=~-u-N3aZ%_U6X3yLGXqbSc=aJc~F5b6zDR?G2 zo`%ceSgcf+(kzoh$clb9W*5Bcwe>g>An+Fbco0IRi)%i4>X{zhB z#`do4IWHApEt5L$uKrnNK}H^<*0R{T#aH}t*RBWlKC~sRy2(3lR>EOcKtgHr)15g= zt+p#sQxvAM+}AD1)K2|VzEfQnl(}`!*FBb49n`oAB0|-p5>zWiE324L);%XgT9cR0O3dAohd@;$YC%&}NwPC80x~u2c zIZOUAiSP%#=H#J5*3faqM7WEj9bI76ragGlcy2M-26qwsVzEf!xgv8AdHKr3-1ECd z;6VKX^Ub@bjz(4vdbZ=8;ePjb89G7^!Guw)TUOALrVkuT0-qS8pNB~e@hP7{kOd!b z1V3GQPtAI{28e(8hDE8$*ZlIim;`L@`7^iSTnlM7#wy5iG3P%D={w)C)qF;V?h7AL zvb2q2sOz_Wc?dELHxZ9Bn0`u}O?6COGdsx9$V7+K2M=TeM&DTDf8CX!32`^>J?15zBY4}5u&4HDpoq&DFg0AiEQx0 zo?5bjabPV5Cq`{AEy?cRlg>oT^+F0+xAr;(ae!ZCF^be-^?!kDBBW)d$NNa=MIFl1bHQ2YX*S^tPTSl$h{w*@-OrJLQdo?Ukzxzk00{b?9g zVa1m}a$&=gmyW-X*&W;=!quPRUoKMsqsxb_XtmrFQ02}_DJmB(t#-^u$M~F zFz^Qf1ktz=bUzR^Qj@UeF67@~1K*l_Wa@(!9^O;c&r#mlu^9tlNuYF_(jc*Z+6xqs z{9(~f*sak=!g>os%YCxpJl2%IfS@RBYZ>@_)&qx3s6&%_5=B_)qLX}I43B-A`fpPJ zvKXPCAb=e37$ja)zbm?2k=EK-Y>`TAn?D97f);e1>uELUY)O@l8n(ZzlAjhQIl^N4 zsY@b*iI{!Cqp%S><^Fo!KJMNFd__%5`AB~;hpZz|iv*_KtpSEVv?ma40t9v8vD-wH`9a6gQ=(e$hb+nv zy4<8Z)8OwW0%IoIN|$xX5^Y=wX?J?Ygi;s`Gq;Dvi+aEe!rTLO*0@oi^Rj#X^jg0v 
z7;C2s+5O=^P(cJ7=#ObH>T_M|;V|J2a2GpO@8C%{N6e1qeM5R%H&*QRkvzH48Ot(3R_NEmWmOyZ@?BprSH<>tguT)hP+g7fa+6>SkBt~{ z)lBfI1aa3!hZ}>9R@uywS>D$qpJ|Hit_BXatvc;szxw!MES7Kbxs0N?&bVO|A1o@F z8JIyC%;#{u&fio#0Z!#(!^^pEVV`WpGwR8W?epRpl`NU>UDsegsov1iMayZ_wD6qw zc$Cr%e^&6L%lZhsGq(A!;P=04T*Qk0WW(+@m1sa#e%>3$Q2W$#OUv;oV!24ZtuhFahT*qY>o_lQujp)<?E+!3V(0IPM9)JV7y29Fokbe+^x#6 zTL95tRJyz=3ft@}76sY7Poudtk!Gq&kXhhFhF{(vEpQ++ACh|qgT-Vs6oYBJm8Dyn<;sNYRae3t*j)O}O|brzgbLenS)R)w*= zKNM>K-IhnuNY=45c8!u|#H=jx*!wWqrDBnuR^ehqEtzc-dvCUw#n*Pe*G>JY5;e7u z6LqQv44i)X#RWZfx^U*pFbEzCd)x1@Bmaa; z*P;Y=t4lr)wPeR3vUfxI8f3?*a(i=~c*?V4)bPg`2_B{~BE6dgm5n)uqmmh#2LjGm z=#vJUuxWggM+U_Itl^M}(yFoSt#a6#Ug6m|f}TO9xd?;8|wFTYjlH9YSs zn6#%uopRp{#Gp0jgEWo<@`o@W99nNHAPH*ZmsC}30F`D6UaO=s*!@MQlILWVc{v8u zq#$9UZh!x1mOj5!gC*GBGf=s?YR0H*Z>Ljl3^3dwx_~PepnOdS_(y5hWi3ABcLzA2 z$G}8n4C5j2sAI==&T!XZJpj#;I9SFA{tLd4+p%>xzaa9;a{g0s!$}zj$(!>PL>2jk zrkO1F<*z*;%cb~^Xo6GTSbj$ndhW@ZF=otQVhrSIjF`>9U{Xu`5S0kw<5=k`gui_z zjOYYG5%ZDg>F39!PLdzL{^lrm)-x8gIvI0SsHwjIor4L>wqx=@XW16?pz2Y4y6s!o z>=TEtzggytFH}_bQW!rjI+BA@&Ed1fJPE1))aO`z?y$d*Z|xjZYnV~auBp{0+xppu z%uZ+X-ZN#8ej#FSfXx{llhU0O(|ArXWVa7K*ad!8Li;22@v;w%pB6b1$jAEtCsoV? 
z@RB?L<_y3cMJnD{*gBxlmYj#Z4-5xlV`771Jy)EMfmv@u@>2Q7uN)79*bKL?DD0|} zNtmteBX4F!(|sRcnz9YpkJs+2ux6oFZMp%sk@XKmOQRWkxnf}O@QUArb$)48Na zeR}FN{7J9~mNw_7?z%f(G7Nqax!M4JB(nnCF?m++KI75NK?i}ory6IKyx-!F%Rw*$ z>1RAe%qX^{bhBPWNvgPQ?}@llnF065*;R|5--bPEzcD7aPYfqQQk%sQX9Nl5_V_0^ z1QFCn_OPI|=9%UYmi-`Q%|2peF#O17Ps=seJ_MehiaeNFr1_wY8j!X)vpmYDVeyUh z5j^C!$^cN}LSfarP9M{V3=MdGLq6Y-AAsUjo&&I@I?uzc=rIdTw@58oF7%@6;5==8 zR+Mb6HiEqMh)Zv09q$-;tpOv&0EmYB?v(SFMDMz%3w~~T1>4!aKTC0yt{r1(B`Paq zUbtv|=mZh-7+kSdSSd)$^LknmYeX*QF`Wn53|}(bjD*?B-&xz4U(nJJp>9?N6Q`&1qvlT~;*WIhsk>og zwNiBARO_Rk$b4?IRXBTTzxQ$4xvTA;4)%2o1YHit1n-Y1`CJY1SMEn7`LG&Sg2>H zSrI7*HsP6VTa`Wz?jG-!ec;e-&b1MPX5aAGRqssZW1WIi<>aA!gnLy2S}39t;ij!I zL)3<=QmRM5Xd#+b!WXy}9qY~mX4(gH$YXp8=xb(_dTdUlDvIOdszA9hDe$VsNdC6h znkISr>ES^|M(+jyHY#pI5A z|HaAPd!C(_=j zy#^x!PY}4I`6LMl7tkFpi5Od-I{S zMiCopd0|&WYqoiCOy86w17rS68$A7J9BI|ixlF=iK?s^Ok3o7FvxRaveybVDs5y>> zZb5OxcX=44z@{y;Qez4at5W2fIC8NtnF3t%VU0KKaUL>ACj3fl)*3CO>5c+O#A`wz0}OY4Xh-5^NP`>u!7X@M3PCi;(~}f}$Xf@Ay*O zFcCI_-66B4llr`>4GJcng}j!-vf!by_$Xorh@B4ElMgC zhhOu7Rl1AP4C;EGxqhO#0nHIK9}*UHl*oBkMYl3Ln00zo)E$_eUq(lgWbv524~ukTZ-{}-qq*Pa z8^)gp0#6$HR)RZnP|lGi3BKS8Tk>#tB$s^{O!HnY*6&N?S4qc7++e($*Fn0dCBBBf zy~p5kzBc#&*n7*Ms@u2km+l7XjzxEhl$68*q)X`%>5`O|u0<_Cx>LGQxmPuReVldq~iD& z=?YwaG24whm8KhwkS(_6js!%d=Mv4#z>lJosRm~O?b)+sirSLI!J%?uS0P&$x;zTD z2HdQ}OiUbpyG!S3*dv)=QY8nSLfiCa$nY={7`6On_LO zFMcM4pO%dtvJhN}+L3|PJ>yOuF4RvWxY4;JF|=0G;!5y$!j$@6_rjBb)}{?~ViES$ zS<-6|9|MHF_|imazC7J3Sk_JvF@Dsf9>2-R?Tt#^M=1%_MXvXT(l}26kmg72_6ed5 zFHE>-#=M?-WMp{(4lOSR+K7WMAN~TI%-6hNkl@z44EzO9=BNSC>aKm;t-zjo7}HN9 z7Uw;@=KA*6xB1>s)SxdIVcK*CXV=drLnH1$PAvAQLNn}^>Hg{$aMozH^EQ8!&v9zR zX6Z4)+xTfLoI)%V_N_3A-mxEe8$oPC&g6D%&`-w}sYqAktF}DWm}fmqI9+$w!>q{?VLG3YpsMC?ehNCZZ#2d z_sx#Z2EIDQ`Y>I3+p>?kWf=wrT5xgPs4_=G*50Q2Fiimkfj{Wpjj16mA2rI5ZGsMW zapmb5PvdmK0avr~PHyxwFG|G`)4I~(=U#x5 zb-mJHGzhNpp-S-KbAAaqY|Hu+3hDP}kRHN3iFmRWIjAXCxc7bUwUvKimLqJH8LU zJm%{tQwRor59&4T|2Wy|ysr%*Bf>uZot|#(fQ&x7b`II`~b@h&@A!++Bs( zTPAG+)q7v@xc5(3 
zrV{2Bzh!(&VEfPW3H|^i6qfJqqw1~fTlRkIP%^0IS?BOa5#k?|_h&_KIo#18?pN}U zFZ-OU|M+d4Y#x7!lJ*t)^d4-rCKlBzJ}t3Vc9bD2a1MBh95(SICk4uhA&j1RVn(3p zh{h~{0qn_9WcN9CAycnEB#{p&Q;Es`;Cb_P?lWgBjXXcKUXwp=WMM9%^qC;eL8ob8 z(YHs{eJ%X3L0$Hx%o2_AcTQW}a2l!j)tmkP-59LATC#G=g2uNyKx6|V z<9n0E{)~}3F_Fg3U>N#0`^@=NeMVUFXZlZTJc`Ns0;*9yBmQaH##Fn{Zfbr|y))p) zHoOCe(D2esP$R)%^Nr^evyaUVZ@hmpp~oYpwoJu7R=O@!JQ^{c9I|bMX2xj#Jj+CZ zlmZ)~EijfQp9lJ0KI)u)S`$SaTChjh_-Q>Bs;L-sGJ(E=o?+U=*$~6NZ65Ph!1ir) z!fuk`dDS~5+{TJ!Z$s?55|{(xe$U2Z`Np?f)=-1!$zAx(Q$3ty8scIL|3v#Is{2kV ztVZS=z$w%~W0Wnj2CPC(gpiA`Uxgg!D~!-*a-R@JcVELYbBox0DLqdE$GG5ci`2z^ zy@_f9DKyT14xG1Ch;<3o&df2zWyhb2I5bnEvEie?lJrj^!D}avVpFj9)J3GITTfj$Fjb)l5ce`MMlkUaXaW zh6`HzMisu#t9hsvse_W{V+E=&p{pU0#9@#ilh2we82@DbyS`?>;R zBb^(I``(w_!Pv@{lu|SzjLTcB28=W&R;BErA#cqreGN-+$yINWy7#DFb9v!|;k~)v}R?)%YWlL!$lg3X0B`;wHoM;R5}3Xs%;N|!{8E=Aolk`m(=uWAV5vUDjjDH^&;(qB9p z?rm=OHZ+MM=vcCfdtffZ@q`0rKwLxJQPhxa1J_`xog?>>e`+0zOU_3-F@LCJ`tdI( zYSDu>(NB9@K|rQa^Fx$Pw~nk+@{RSS4aPtT)UYi;TmM?5S}0f@wtc0dy!M-ixlQrJ zV`;Kz>ukOM4*Ta2ms@dy*!f@+fss31mTYV6(=8W5W@s#wh?UdjQH-$_j6mVl-#3a+ zTJCwX=Ye|56(8BT#=x1^aM$A8d02OIyp-?TND^lDs|PNHKtEC<{YJ_r6m>Z+(GvOB zf!xoInc{;@E%7q$Y~!|dt{zvng>gCGaSL-Si#Q%{(0F!xp&LZKNV3<7z(^U1hfp&y zWo-;Tk7khW>nHnL>bo}Cp*yrkTseE(a9&N;#`%2yQ%w~)i2NFV9+-%T%rNG0N*!`O zAo6`!$YjyaBs~)QJrUxQ9`%o*3!4r{mFPwzRiH=E!m=U%qNSxXOAt~ zjNs~?p1UNc8xx=P)^ej&d}W|3o>>a&5uKjb_3CeQ0|j=c^Ly~(7%F$Zps6gs@T`9tM~PgdhMp_g#?-4pza@x`X9)BCAtnt{xF|E=G5KZ~y=MSfwE{@~Sctt<(X zM1*`mB`tVZKdWyG_NeN_Yr*!G1}dDCgjK4?%hi;UxX8Y?Hnh{p@r4AkbBk-6aRS{h z?X-nZqaV)i&R9X?RRw&%epDof?MTC8V#!0=q%*FB#euH0NLUE3fU+B<$e$7ATt4Pg zA2%bqg_!QSjHA0>s9gp}jAt}7MA8q8dLWK`^+b_0b=xsc{3I ztnbe@FCN?b38|MjsX^{-O%e93EG~KvhHoX{^z??*K|ql~g=cCZ-x*1Fjz4DR`!zC+ zlm{ysdev;P{2`*ycQ>glWNU0`hJeb+cVRhtc7!*HC>?%eSN@TcG3htD3C)m2efDQZ zre3-A{y7~onhR2sSRL&nH5O+_jV{tS1HhF2y-%2d@}1sxSSn$^nLqgK$%Gg&X+u10 ziE*Lddf6sXtyb<%%$}AQ=QyMnFDVP%wE^~FYE>z9@EenGc40(AlV6eAQjHG5!RvU7 zP^8o~zNtvv$`3<4fw6440^+%p?Ik(8r+udYL;eB4vWyfnk~Xa@!`P!lxEBotJxSfL zp|S|M2`{nBqi6F=E 
zpVu@GW-_p}0xykCe?%H$(c7rzJDsMOe9j56{T}SswF(3Y-`G7rq_ctjRoa`fHs|rr z_HwgsBjmLiXC~npwCO;_zu^gmks)F^Roy5My?Lt?=+Z41>ysnu##lE_?xaDd4Sl=> zrbjSCdsCtK*%iPVkCY9^$>$kU=am^tI&*9^W8yqLdu!26n-hbx8&$S{=F)jKOfy18 z+12P@c6&;^F)ScsVz=%3ZI}{WoPVvm<(~2ZgYx4gpRbJQ%ES`U4mv0<3u~bF*!FvL z9!ItBSJ36({G(pu%SR6FT5OoH@ww->;j}POZl`S7A9r;Y*@VRgZLwA^1x$^)W9Ary zVtoDikr~hWnk}L=jCTZ8Llu&guzQ+s6egje`w!kQ58HB_;FR48zd6h3ml1iSA*4Kl z?^d%{#9sS@S%P@nUP$#ao1hIKN^&~RmTimAoqhBeiu+^sYn!GX7&a?Rex$)9xEh;l;qp>GS0abGF`ieQEvGFiw@R=DvD{MQY9S&A3k^WGe-{YYhFTc97V^CrSko+k2ZBxR0n zQX;BW+M}jVR^RFkl*pbf>!?E%9OlYM8^u1Stu>O2Yo`Og?6inZ7W%V+A{<-tR7puO z=i@#%;lC8Qb-h@NsP=ST7#9)orUJI%`a23)Dez;pkc8k8+MIzlY?VmcI!#*JpLNeW$>x5Gnl(KZPCFoI)UJA3=eO3hfT_^4M1TUWo# zOFb#C$M7Q`SvYK<@0|=`63e16M(^`HRth0GDC^Go;3hFPYPFk-zZapCaB5mpR9e(Q zkw6kYrsb%$_Q|UsPJ{acJ@Y2`wM#FX{pP_=;;o-T+PuOV zQ=7^j>|diEPC9tqA9AbZtICKpvVCmRl#eiiCyel%jV0k^+TwSag`Iu<&s^UK9`BRg6m!JYi^+-bd2-Zw+Cm{mE)D-q`9x z?5%;DRD8Fs%E4TC(^**F{c!#um&9caa-3KievDDbU~jbm+n_I3R>jWO^B47}&&2DA3# zk;F&E8Z-1XtD$0|wM~5@7V@sFm>(;G;ihS}q?uVLs{P^Y9BY9$uX{&O#Z5KCh7@ozR;7(52giFKj6djWnKa>ek54Tw z{<k*H2YixqVpca*n3Faq7F{Kjq)(oKl?l| zMIC|h?u%_ar{R$kLo(EzldZK)_0AVMxTrtv%GbEN+K&mp?orbskLrz!4W@~kePQ6b zyl=jnqUOs7vR9N0nODM>gK$ILHT|Bkv~F(+zu4?a(T1XPdE0CJN0+CxVx6VgD39F{ zy$pQN9XKxI)F%c3^LH$eucnE=sq8isjLH_5LX&}Q*3L#jyK{x7EmwrROQAwk;~2?+>r0ooM3LSaj@QwxP^9<-_3RD>3g!%|Zp$|zK@O@n zc=;dm_U$6rnRVR{e9LNCvV~c%#>G}<9iOKm=e+PS>YWv7=+z_TjbT{9p^>Db^$B@A zu;bB`?7-{=XSIS3uDvHm>nZA6m{ESW9X{>`)N@TfOr=}dKP}d9ZUn0-P^sdF)?^nS z_bi?DEzO6-YnFImHNzl%yLYT$LO2TPSqIQFT-?im(3~_=KO|eS!EuI-`84eD-S(x_WTt#I?K|Ttu#CY_nSnx~bSsV=*Kb|X z$6e94<%d*P>}%X1n|PE19Rr8UF(;v5K;no^(}~@SN~gwi;(t=5zruAD19#Qq$8e+> zy^Os*)k%|ezTi_o1{P|JeE74W%yXZ~$NJoiA=(d^A;?Dd<7~CAliQFmChAUfGm5@kXd{g-zO1H+F^gZ#Al=`tBFMfCZW9HCK zM~}y0G`GYffz6t(+WTVT?Y6!%4mth92PwpD8k#c@9gibqOM|1WsQS2tUj91vXrDJ; z4=BV+PcIXWOl8pB4wIZLCovBniiR6w5?MeSD5@s)l*hnWbHh5$c+S|P$|?&h=UbL4@v7#_b^!$sXs$ z<3~EVieu6?4}=e|;LVKZD}l;{2W_|*du!0O4>5_uw%^^d;Q=H)#H)&3ab_~zhcIA&`E8NnDx^ZV@}kP?Pu`)N-D 
zd-km>;ToY|0pl+=tJ0p0WL#dIN(C6oEV2?MExMALX1e8Sv^cT{r|xx$00@v$C#oj@ zxxgwPufD%#oq=#;mv7Y;8=F)WBucV@h)WhBPt!78G}JI=*aNh>##@bs-5YC(-lTV3 zh+$pKN`ljt6l5C~zTOrg+0VzhAfF3zh$X%uo4rxN|6Teixw6m8QBbaSJ{P&Fr?p5^ zs18+>zdeRLWVUpQ)iz5$XHtps%VjHZ?YD14df4QJGi4zf$nSExkM(~-ay8iR+W#nJ z+kB6re?vQ#IsK#B__h)v={waDoBZ0xREuN}#quUJ+ydRA$Ku&yH@SVa9LwfFL6HNSfgWp#8rVcdew?4T>*DlmESA4eQ7`!M zaL$KbOg~Cmy#W@Y(5i_Mq!D=6iea&F7P*4LNBt#%SO)IOS<)rDZ7_Yf|M57OP(^nB zq?CuKT_YZXv|v_)W5Q|EAd6m)^z2FW%5rZY`d53UsXXVmTKZYk3B$H}5CeVI4z7;a z1L4guZ07cZ&G?Pk+}k46#LJ>EnXyM9oDQswKwdjCf-K6*$$f)Dwq`Us?Fun4|I`8K zs5fH6obk#X58<*f@qAa+IRI1m|14Uo?g&#H2v`?!e`LD}HDaXUi8&z{OFu@fbc zy`dv0ifEgEM>s?}o$s!mxo_kM=DWv*tW^n3F9VpGaw6@4I_#nVl*A<#w8&qBVxJvS zDotBF`v4N8)Sn6v2Kv{|?6XQ7`Jnip>2lE@J;s?-r<64tHKIx-HnB;ZSD{pX)SAp( zeroAUJD|5?*V*YD_R#-wzs8U5$^kL_1N)pHq2np-`mID&)e(;Xe>Ur=Tv{ySdmAOT znqlareei=XI%ULEw#BE@YA5lB?So6b>|MdQ<0z%f%)dx)1Vxo0A1#(y99Cl0gqG9UsY*_+yeYOPt5b=Uu;?k@rF}l?iA{97Y7Uzx;tlbZ zZk}RyPlI@Nh;hyocly#c#NUlX{ZY`7sL!x1#>j8Q&R5eQBIQ3Q3(o~6w^MtMOW-(G zP2pkg3B!$A9ReV^H%X?~+mJXU=RcHH@TVtz`;=xL*EQP(W_Y!;yC3%tI%8NBnK-$W zijut!U^s@Q-D@AJpxm4{c_yL7lhUIo+XaNw4%^ zfHhR$aCc#l6hBxeG2e!#M0TQ)_&__5X7r@j?vDTKLFjzq^H-B{zMH+F4ZUx7l>H|K zM{anfbCfQ>Qtw?@o*%~w8Er*u-#>Oqul`rf_-AGno^(H}2Q*`IhWWRQT7W%g5(Dal^zbrlJb{z_;^IKG$-xcNVFF%GA z^1R_yg;TzNEbak6E6fCd*f_64Q{4(bw(No|Na?d?@T@Pi>V28_U&(NhW1&n)uJ4TARcGZld>=UEPmLj zvNOrqOtV?|^ub%%W>_+8yjiB44pTL|13$_+yJ9wWfdRB1xqDb8+OsF4=%Y2BNqr`J zRc&jsn`iJqH7Z2n#IU)rpe-$pkymOf`;}Cnn>iSAu^{3l*2X@*6TiDtCH+Os8nb{7 zAv8Xe3Kag97`0rs%3=;)+gN%Sa^*F-Sl6|wn#=o>vGsv9n{AZXTY-aB*NzS=zL#`v zrdK6y7EkOTU)wdx-HemBmZirBP?!19s>|}ny#*T9H7IzI#O+^lO6+MGFd8*^&4NxE zS67vZtmW1|N$?}5m^xUFc8)LkVo$qDAYm)gQPJ)U*~M3a7u*?c(GW9EI+vk8?i3CD zel{1zv;LeE7J)Bn(oD8~WIDr~&T`~nDpvwBoQ?KPGKl5a<6YA(AeAp}a;rA${SXSJ+fS}X|7?b(w_gkOPpzc3U6WmTEZ6p?<3(xfZ!>4A*RRPWM1uuo52&lwgoPf7 zyw&noWHB%X5+2tzmT1>CC{xluiv1eQ z_(9CK%Z*_vu>Pw4(Vgm({SuCtS^c1GU(^wg*e~ROanACaWFAhY7~zo&47FbmZA9Xp zB`$>LoT&MX9tLi0tqjVK1Z2O66Z8kXPI%3M$$ZdKy=Be(aQXzADbI)Qg~1^JX6M^9 
zaU9zT=dJSAq&He>Ka)U3U*T|np1&nutv3gT(Vv`W-94?!!JvHt*7(L12j$Lt{NQrzM*!w3;r@%kfYnJ~~OnBDBHR1N}=N z&L16HGRe4!#_mfhiKj52N6YT?Rm_v|8g)<`(| zi?(LY;{tipXVrNxHjpQ{=~X)1H>ch6Fpv8k1zFPT{_tLxm9{kx+}w{cZ)OMlj@t-S ze`s|2!x+s?J80XPbXg;W^9&;P%0j~qf?%)SIGiMxBnKEB=g4JM4-2W~k#%&i=i$E} z(+fD7u98BD?!z%@sQkt zAK2F_8W{IQ;5tVq5Z9ppA~hr`IxGH|I1BL%Ztj)NTh#Yx==jMmzfSWD%v z1@HN{3143cQ3OOj;c@$@Bq*(n)*P+!;+K`Lq;R9=W#vLbiG%P?Wu?Zd&D*2}g@vQj zkRqE!2)WJ?9W5#558(Bn34B%Vz4Q`Y7#cURpKT5;e=N>IB*Hm|*U96ra)7)T-NoZ{mU__! z&SyS;tqv5mdX#q$siS`Ip~$^n?)4%R6G~)J@JG35Pg#l4^d)-GENWgGa&&#```oR; zrpc4xLCWarplQMY;R$37#7=A<>~ z476&)L?3W^O}_r8NXzf1x|mmd-l0njl(TzB{~#~Jltj`^pVfF2Jyl;lHi`5X%tnyz z{3Y+hAto)fP6lzzHL2YqXKlPmw_JgAMW{S;r2xxkp7u%!NvscW6hllc@{2;L$xtQ9 zW`z^&Use__)4&xU95@wi?>^!(!*oql6*E?j{>|7rDTkF@zaWh4v(uOcPpd!$2?Xj( z=J5q+#E;DsL;Z3Z%`}dWT7NLPKMl$CS*g4kR-!y4`8Ab6G@DTt{{nJS)#*U8NKjp+ zq`$2H`Oa-~rEO7+K83^mX0u?nX{fHXI<1?@NWrG5dfQsfytHZOH&7klE8mv1hxzt= zor>jH)4LsNXKe@0{s*sAHfnsn1iji;FYa4kb=k}#*!}!xs*$T+zX$ttVf*2(B$qU& z2dcfa9}Y$L7*bW0DWJec`=P!#0H`P8|df4*71k; z@V}?bTTN{O(kkDbeqeSpH1Vas$n1V}?!LPHfN+wG{WWb~;Q3gq1c9KUt?xcv=VfF#RbRyQ^(Rq5Hg1%ZZeSXQu+j-kQ>D;bXF0E9jjA zN_ZYsoa#FkQKp~(ps7Mn3$ve7&N4d2PzF5lb>JC7KU^CT6z&dW%!mC;Yxz8zvGy?i9D`_rbw&d-1uxm|1FF_Ur1oBP_J?*%>3 z+D(k10F)L9z@n*DA2fX_Mi@*8hanCgIDtsxUk(&Oy6%lAo68FA#Of+F?Nk{Y2fne(q0R* z6!n`G-|;rFGE$y-{Zv)lYn!z*fz(J!>`J)l^S8s(t6~ijuxWF2&j1Ym6Dmc?;1! zt_@GS9qb>-UgQcsD>z+h5b|$)x7fg!@RBpHX)H3iwm1;jm%c~!24m>`)V2KFyiVG? 
zz$sX=HH8`zVMt0J+VOctIi@ce{dRKn$@7g+l0spOfhjY&Zsr2Df&fVQuT$XD`6dll zFoVHmcK^hnE~5m^<=u6!2}@{|jIl~Z#p6(h0+C+fT@;-UJfE5KC?1DIv1f@tLkYarCU`DutVV?yudIaCpp^Og8yL8^n z=g=(8$TR-+^uRH?7m)Ys!^uM~@3lTgQJo8-*hdhdZ*4`tCg+xYZL!xva}@YD`rT++ zYC0{|*teMM;hT{HOB;>Q+3adZFj=&BdnhFTao3kP%65-fh4?b-lQlOup1L-6JoB7m z?KiigV&hH@=Y*5Vf<>gFHEmIWVUrPPagGtZI>$iXbI!hI%gAd_Y&(wW_!nFdnh)zB~*_3)HW{hL+$J61>N`j+9cxxnY`_1$iuqTRVtq` zTvDM;ku@2t&k1&$8GlE7s6RjC%kf>n99^@#IUV>Nn-kvlh(f3h{Vv>i(~iY1C|X{1 zH22_>O8!u-azn?%xpu`qIasf4U5ZoLiNbA1C{idxo7#sg#&Z}2l{?#~!N4zDf{KQU z@7eJ_x8-Ry#Qpd*pO{0bMM@j!&(xy4_DL04!T<@!*f~9CL+O9sARCUhv{M`U<2ELw zmN#WeuePf8{QB@)t8l+?VVUv;n>J_0g(+4Yr*;%n1(A+wlyGS=iri_GP1dq-;+dQI zJ?)Q;qAQ1|Sno2zlLm7aqzvXv$5UYd*H%!6HA>P{m)*t3^^&yWw>6{pu*2*Txi-e4 zku^S~Y)5msPBE%Y-|V+w-FEKq!0CHU9m+35@$j8k?C`8r-_b$K{AJ_xGPyohoZ&~4 zagVz>{;mun@sCXqHVpNi3>df-OG3}Lb|z=+=#Kbeu}N4&G;c%LX?X4gf(XM4guIg) zw%;I;+y645LZ>cTRcZBD@H(i8)MOd&u z5 zs~izUkkV)RwK+Z2dg=FRwD24s(jTk!&M}W_uPVi1N(c zh;4H~#M5+?Fs06yC5*DiWgWU&;|Pd&VLC&g4vBEv7EoaeaMaH)SuoaVPv7GeQ|6@4 zw8>4%WDDxDfeJb!O5>m&gF_UWuaQt?mMAms_cvCfgdL>o3H%Z8+ta}W&%2vXCsC8% zx2kz|RVo5hl6iXUTeYR&#V4vhlhS>VXanJxoMb;AIxepeXyEQ z*@6hcC`_2bUg+SGUq2>kd6y0Dg1u?$M==sF%LTS=Vib}laDGFr%k^5#t8Sa7;%0vI zCc?OyD3f#LU2PL`woL`goko;tD@q5V2(M3X3Q{%e{I%D4-G4toCLSf(+~_~Ku;aG0 zzkSvxnvT!OfN;e+V5409`AEXatpR?yS5J@>yfL2gNcU03+BOpLHQ-uC4;BoV+Zhd| z>rbqjuj{jbuM%TrByPh0%6&-A+H3WRyQUt+l(4B&Hm*{C9Gf_`122Xs3qM{0euOdM zq0K?yr0Tz3>O^Kf5&K8qRpFZ4txIdiX$K$!&DJM2064sKur4$WZsSmY|SpMG>TMqUta zTY-X#vpr!%VX$QXBy^vp?H|fyE+|Kr3G3nLav;x3*O1*>E{*cNg(BHQyjyFWmP22Z z{Fu(d-;|l;coJ;Wi56z}jA0H{r2nzbFD@k;S$theYoxR%vfIZ@D*pwym12G+x8(H`bz`LZAL_vf|jXb8twX?QjxsE|M^)D{wtOrpPVr>3~QckQv?<^7n*Wx*7 zhJ$XhI~Pn|-ZBv!X?-hPWB&LaTg0Jwz5DZ?UCRK$BN5kZH4VhJqGs)#vOjKFbP<4~ zAi{mx&;`F#V~g{5yPFlgU{i^z0d`h`*xT4#Eu?gWbJHUwah=|X`crL|ug1uDh(6tN z_HeDxt;J~OfEDotpnIv1K^FoK7T+;c3a01S9-XV}_AO?{e3h~-?w#!xRJOvN5P}rZ zm&$!<7NGQdNE_(e1dtDYQvP0ydFS#ZLEHa`=KjML9@@$b)^_bE_ZM=e5C|nNety8o zLmLgT&oNnrl#S2I;^2y$c}F2skghdfoQMyADu(EqT|7qmsmn&^Ubg_iegptTx)rUG 
zHH1w4DkHZ3TD!qteVrz=-RfWIrqOc3Bp>_5r?7NYTcnZ6(@oun9`w)}(a{4`M~&m2 zjD*n&jB4j3JYxid`t^Qd#h2we2_HvL{;spw8W@%UV!^L*+RTMk{Rd3Tqd$OO_@@G` z_?jE)gvII)psI*2&pxq})-Y&j_Tu;i3{KXf%VxjCKinqRYA^+TD1hu9db?Ul#`66u z%c2coj&9TVBEy}ixbz|TFG!z;sz=TDa{3dJrWVZ+_u%Za+ zih!cd5+UdXz$m?vYGRjVySC0lknx3H#NlH!F$+%%0tZ%g3{18>^mo9e!T?5i@jYwo zQ+a3Blg&0~bPZUf;_BIIivLQrsKZhjtf}6uiyPpoT4R8_N&PYa45%4acxC;@ z4{-lZ7Xyv3K4~%Wn*mD?ZPB5n|Jd#>JqEESV3yby-92Fl+WtD#*O;@*C@CvDF`fge zT#`{oJxu_#06PHgZ5#WC-h;$Xd{tgCzT|5dBze8`i~uJXtop{^DQv#0+vZ{jE)NaQ ztfnwc8BEK4oqGuO;oqqUddz^9nLaxD!2sJL-VdCbz#X-VZ-A7NsPQ`Ri7Rs*pldA~ zQ4$u#LLd>=!|}koF=#96u3iWn_LCFSWp=n$uuV{UX|i80H*IqnVF>Q7tad^p`}veG zym(U&k9?gh-gp~sraUv)1AaHWz`!13e^sw@GUPo|#`4d_@So@&4hYyA@TBPlR^&;h zSNr#PAAqvcC-SCoeg2A;|~6&tC$0 z9$KF5F&t4&y94p}YmI`TzrkFT6%akZ_I;uyseY%1F?FvjZ*`84{lD#NAv8%bmrjSQ zk_XCYWoRgqME2iBFkn2u4lKSuG^54(pf1Ne|zdK(nzEg7)|JQ*jsu2imy z`MP$Jbotn|(1TZ4NxXk#_Y`h(G zf2Gv-*0Bn}x3imrsU_{!g|z=?9{Yd3$$|qQ`x477)Ir?op?`@0Sdib1$sFLXnmOPQ zE44nozyEpL6NN&6+Id+qKBAOrZ%KYC9rK3O^cj~M_WxS;@ZluWRi z(nMBfmm-d{D8DO`|#al{&z8_J6&X3TuQ7t^G{j8W>P98i1sQc6Ym%51{AbU^*Nd@hO6O5q!Ht@e48gp%mb!zD81L z_NP63E8)I{3Z~eCFMf5jl;Q>Q-dbb3tWxNp6C43=15ogSrIIF~O!>sTEO7pU4n^b; z0ZH&VGHM)<243x}gSm2@h0D_GFTsC7zRiNPI6><<*=MWIDe&h@&^295Fz7h7cljJk z0we~44u}5LTkT(-rb#MTg$pIM-8v|M&~*k3PCr4U%Fh0j%QfrQ;X>^y03>t*wyiHx zLd);+T;#@EZYXQ!Jm;SMA0tfVV$)muCGO@u6Jzy%ZvIL!7e_ zIJ7Gch@lxGI}WtSWT(LQOUe0AIqDd}anY~%(Ro-0$ayAzFHeCe?i(OGB}yL1>=q={ z0Tu$7cOicEM1eul%V8!@nlza1U%+c}hX|_e-DLW=U@VJQ4;?-hSkZ##34Uj;!Mj=Z zb%6Fr-sm1{=FXQOalc^ddJH&b~TV+n-B`~NzX9Bfhk z3TwBeUl{oOgl>Yw-T7&smrNABKgZYKZa7{R9!n&pR&QL3Q?{A7OMre+^ob#Reckn?7L0<}9e8V7)+H1im>+a?CIEm@cL?^m z7`uD^J|&145cXWqg|)hT0tcg4MDr3{n(42Un53E){1snuvEbBSzcamyay9-;K z`geA4Y4x873hXY0t{oPmZ&pu(fZA@UqUD%+lKRQ)2`o(zC80I{=4sF$ySUkI zqZM1n%U|HNWmLI8M-W~b5}T=ea@Tzw_$6h__er1vBac@>!NE-XvOEhwICYQs9^Qkx zOIEaMW@|=KewUP^F#^nO8aw10^Z*b%4(m`0)ohK&L2%D&4F)6embL0?5z?2|K+@o)o)Z z6E>E1_eCfpBC}OS3q-=w6tx_Ck1^@CY0hZ@iwg)4YM)pJyy7wCY`rH+u^|qdk-mO* 
zQIgR32vT?j`}yboN4`UQNzwz}{$<3#?NWPn$J5O|iP0_K^BO(j`evKVl8N*tdG`Z= z@A%zk6ZRgwtw?^}=~ejS>)4OKdQHfGY1bjRh&{$rv6I2bq zD~VplYEq1tZx`;`OYD!6z2lKDVW$z_7Sxr&{io8TQZyop$w}+hi`?EY{tLu;uqK~T zPCac3WoydwQaVgL%8(YP%OsaZi^7mK<@mnsM6_h;e7{|)KqA^K!o+W&_|{Lb7@^LS z*dcj@(qL3(v8C*Ioo7*gv)blrF3E1`VAY8xUyi#R$Jbeb>4~X4vb!55G&`p?_HR7g zT10t*8`aP!HGZfFI^67TXTRrRIbeGwFed%EfW0it2)z!77b93l6#JU}+QAej3n=EQ z!tRpQ(XMF$yeJdPQ(HK8w%WT%OXR@bnn~MM`m6&ye$NL`!uak{u@k2-y%*FHOoH2! zT<|=U#3hm04>x4?4Ib;h-x8d6eMx2sv*LfaJRB#IbF(=>mA5j8k$DZ|sXYJ*EXcg| zEtyGSZJ)Abc6!%`_s0^}cr)2<=Ok&{X~%uieIk^j94+y7ocCYiPVm0EQrm>wY%@uW zz*r+3#WoSI4Z&11*UkgT?$}oJUY1D0Ev1az#BO=A?UYP9NKy^IYgkA9`A+0&6g|fONXdCwh`(BSQQkpF zUgo6o`})J*T#f2XV>_yA22sa(5XS6B#h}uC2Xm)U?gs{S)7RYOjKM+K zrS)(pjdFK_|AS4aQrYJ(V7$)J15zcCr>W?F8Y%?q*3z+nZ+35RUqV|T<|iO|ZW8*b z_YIhqYrux0DQ`EVj*Bi%JE+-KkvojtN%=pOup-b7|Mx`^>WC<3Wx)Hzg+n1GdrUl5 zuO$G%WD+21D7mb<4|Wu$glag^fJLKvU^`tjrPbPqW&Oqc zBl{kP%$sV=$Y5xml1c0rz(}?Gs{LI{@__ZQ^S!}~mWpb8^U8|ze?#k&BkHrfXa-}g zB%$rA*j8-g)GuWqe#HxCo?2-#cr;l|_ke1qbfJv0t(F{)fgjtoWhB{=@iI^8LUI%r z`}3U52fF`=7U0qKrKBdkS9hhUIWBHf*f5CjQPL`novx>G_L zX`~wz1pLlx-_QL#_uhJs@%?!Jykop$>>qpYwOCi3=e*{e$1#t?XvZH2EDx}Anfi2Xwc_?54k1#Sti-9j~&T)6ZDY ztAvt((iP1<$mAY8;E%T)StT0v8Nq3D(w%|g^q)nR+%=#SIi+Bk=Aw+&qhy?Y@QjA~ zfY4dvdf*}>R-n2X?4scM&)BZEb$iVN_FV5JAX^h~*u(zfX(dfe>wwrynK*2DP$-0k zSb(|!U=9y9XkHj=oDTOiX*dj5s|i2;p6^HpyhL1#ZJ6d79(Qq7!`s_egf~A<=8+VQ zFy#um)@Q4s(50A{fcgKibaO9+;ZKmkyQ_UyX)h$sMkW>f;hdXO)#aGhQm6QK>J7W& zIUE~#4}r3HB^cBBHfXQh5{t&v0#6WA?OoFy$k>1!Ag zll@n6;(CHsDyHCwEjH4OSa>b$`G%3-TEY+-uzd8|7lbcqkK=0%ZdNGcmVrcc>UfHx zPYIUZp|tzGj}dSD<`(FnAAb?%EquzjFEHE^505NaTjuOSfq>=T?(%EsF5i2ZHvAN4 z@-XQ^GoxL~J zrOy9sHDiWNwy^nl$Gg@x>kU1Ri58FpMX~O%>wcFV6Gimw*#N|%()k8-@!agDq*onH z(cohvHuJt)yoN4D^3Q-yc*5=7Q9zPalIOhU}v2Si_*yfoi7A*7Q1@{#b?y7O1tEk@C- z+%Wz(#V`A(xT>i`eB3;r3j=Ye?b=~d%@j_xpxVj$Hj_Dyfg*r@n=0Jm0b&VwX#;K72aOQo`* z0Imc8VuY#-ZL-$v(~p4`_S`Q3n^-%$5hl?AwX+8+va(+svA6tgdQDn627{{f>9x|# z{-=9W)g@^}T8{w82wt^&KMs~F2XBLlbou61A5S%`DEhgn6*|pOuKS=w;YUzP=v}SP 
zf7}IQ#1>$B!tre*(Y>SM`9=eqhMTV$ZjZ?V!<5P@APj9% z{bQ#H7>&a>U-!2_;qO<%rz652k6-=zm67dXl$g<{&dzKj>*bwBIUhXU6~Zp;bu?9PLphx(Xen&O8LEBxF*j zi5F?+#A9@BlSW0YwK4TsdO&2Hr$AAT*t>4iJg9DiJd4ut58fv(GwhO7jjisM zR)PeC`kHa#SFP%XrzZ(9xrfh7KR_@3Q8$_c9tUX+Qu?+1+Sd!WWx(z_$6QDB*p}TK?JQjB`HW@dyeQ>+BAgyl2;^3fy z8`XCVXyLV5U$dgqzrAz0DXifD+yJPAEw!N!-Sd zo_tfFlCcP`kFaO=z)tdi43J>cwyuTJ)4VN8BWFShpk*tQ90R#`@#{<$(DBGX|atb zZyvq@xowey`>mb=iSafYDvD9RWkUqr=HcS?r$|=*Eq6b4g4Jv? zP3vX-(34$Yk17APHHY+a3%HTi17*ec;1vKu9^Z=^kJMF0lxRvLKPn+-CA}mAC^vLl z9mOwxVT{J(u5MGAZ@|iQ0i>Ug-Ugh!PHT*d?4bxwT{f7v#e|X5H`i3dBC4@zAECUV zZ&^xaL?JxZ5}Sxt7VG2q@Vl{e2)h8H9%p)?`j#p#25K1t=$S2oH{nIzuM2iAn$x?K zPDX{9j8z4%tHs%9auW$hXilAr%qAZ%spX|+Hbu%fdO`n{WzJlMea*`UC?HB{QdO-E z14=-e!kvh^vPADOJ=K;VGy@Z(HXdbozm(uhh2p)k0y*|Y$=0CeIYfM+jnwI+jYr`+ zBZ~}ja5n!UH~D`pjsN}dS%9H6J02agI?7o>T=kjO2L*=j{b!pGy{qM3QhU95<&5j# zeKH{mXGNiABB6}o3YQ)IB-m>TvVC!2SLoATYH4jC0T5#2g71&jiZ9M%oXhY~4V6DU z>ZopGH0rc+>^BY~Y*`|sz-a}tp;*8BAIIXLD)$q&JSZliL`8dP*#BIIn6$?Y=$ayc zLE&dX?w_23?h@?G33BoS`u~hkkU{vxE!2tji-$LFs#}_oTBf!2;uUVM>gx$)+7BA3 zEmMc2rTliyYL>CYkF@#)$1TZ?(G*#lqa%Qo*cXbX9~};PPim9Ns-u7*oN?$N8@G$m zk}o9&+eQ{rA^o?Xfx&@uGimL{n;%N-`@p_U>iDyw%VC?B;Pz$fuAKR;=*;zQ>an}T z)u=2Xzgvm>vMppGrJ03;F*>n>)GG&ue=yka{@cF&KW-D{eGD^F_nLQ+t4Y6|!Tj7G1;b$#IpY7< zokp`R4ov2(3Hu9>%5~z1qWtYG2Q0nu!1}`@x(@C7V;##Zr((uK$Q$Y@NoVTsH~cxc z3)SID*P;7(NrD93ugLS26rI!Tz=J>BIY)W$ZA`LH&_~Crmgxw4E0r@Kn12FHgCR-g zOIB;1KRgjG@U*z)q@crpmJNbb%YnBS+Z8AR1LLQjD*W-h7z8qF6)*{&LHjy9s2K$7 z2thVnD=lRc1 z{%4g6e8s7&9P)P7+hYFv!T!C9gIuA-mm~V}%YWGe&ECw z;INK=PySBk-;wbjzeMQ(o=O)Y#Dwk-PYgOn>@5%umftA9N%-xi{p&h9slcsv9e-}5 z`lmPs4!0cFOosmYGtiRu*eJ@l0x3Y2bu_0K0CFZiu8Hzi2dt(UgSJ5y7}wu?_x8yS zXr*+_r?*IYtEd2jrX`ZMSCXMFkGWzbf!hGYj7rfGLo_6!pO#ocxD3#CJ{fv?<^HO) zcp9XF^T)-vP7K4_HH#d>WcE)}&FO<6Ur_)RXGgvUQqusE8CJz(reVeTwe#?4HpnkZ zTXDQ{<|r{7 z4FD<^z}z#f`t4In)@y73)&jWP0<^g)ua@aE2NlQ%6F?=C+PwEE=zhUa;~u{IV&t6M zO8@Ie7hs^Jz8V0$B)V7QjHBfxx{(ms5IPVQa8{?^W3VppY}{|X_OkP#PoJr-l(5l4 z(M#ZVxyeB$2q`S*U2Z$1dI4uEx{62%vp8fZc%AmBk5N^L-oA}xL?Zd^ji7p(*n 
ztFRkcW|;&PK?v8dDKe_!xHf?Af_QOIteGV>qpq&f>opmq&~kq;fLM$s-$+7A6a zncHKvtBIe9vfA|WZdo#2tIZD3^a2&sBJe6Yy7ZLh}_d>JwJlzkt>P=!1tUTo@$_^ zvlbOOqJ_GUPX&#}Jz(Ezcd12gNvFY_Zr5qP)@n7)u97mm^z=s? zeRq7@U_W<{3V)F+r>yfe%N$L85p3I+3Vm0)oa&hhtgenqc~QrG)rwBO$#LWuXag+) zsx{XTsT*6r=N}+qJj0D;B_?GJqf4C1yh(!%)CU*CK!rFHhe$Nne^uCLT3@|FkE4*v*_)J+}U2&+o3IzM8Bd_6}#OZVc$h zP(S`uL%hKM^sEokob>G9-=6}&B&RzDv*h78a1`_fRZh6hg&mquc<2qz6s4?0{?XEg zAb$W`f^8g6fc$&f$VD&ZJ;1gF9Sp6Fl=%PAzmJlU z;{|T*KS1+1?_=YB7{8g{&O7kF=gh@mlOtKKC{gME*qT(IxCXkwx7+ZA7ZhVEjBm*J z0oNix-fdq~wku2T??YKsd02E&H&yBc3^slOCma)8bnM z9Dz@$FGUQVdsf_y&xJJ=eedKX$;bU*;gPAm!LSSZyreTCX;+cmfTgP_hlydOVogh)g3Woaaibe(r96){baS9Mgna(u<`Vf8W8@3_n#Hw2u?{>DB8rS| zY=AJu%7M%eDo9F;VwR!A8XUz$q3raRb)+2yc;OOKo%z6xi{@JH;OOn{QD7hpGG?}zy7hp8Q=&GM= zpe+&Blq9Ykrf()X$=~!%dJp$0+VR|R`zm!QH?k0)yj@G|RkYa!_x}Z;|I0`|kWf-- z_76_$n$V>e{Q}D~j)+_aIq_+kZch%+N^u{6&)7qdUpf422cW2RNz4W@FzjO6rn7;E zOp@eTTjdRWvJt^im~awVWktOrO2@CbhyKjzX*;d_zz_r)Ai)X&c8Sc%^oytp zf|Iv7MqIRI`gC1C>jV2jvqx)&BJ=~Htzl*{*GnpJJ2V8};0R+{JSZy55OFxsdGG>a7G9(uLm*o5~zzT7bmBhwtUw8tTDRU!*r9*a++=&|mka}Q zh$N)hV|Y;rONk3*yD@5cFa^3dkkgs?34r9OwuE6v;_Z?NnPX)x$jXtR$WEn0aV1oi zu+JWFB2vK`>yPP`twCkApd+w$e9u-0|Au0hQluFu9CgERqVPlSg)d+3J^e+Z$)l%U z=%$95X+WEsc0A0@$)(TfFf20s13A-&agPv@N=gv952I2kd0m(p0eYAdzyYL>F^H3W zao9`PKa4kQsB{WAnaMI4kD)ygBcWwFQ^W8fJA%(zI#6GM7BlS?u-{}D*#|I73nLSC zl}XWAug?I_jcCiq05>2-hO6Kxau!RLY6&a)fM+p~YHZ@fiZ6xOOBTq_>L|UqmU$h# z97G(?kH;dZUK^_#uW6AerPytRz#j#MznJj{VErZJurf(4c@eW{N?(7HNm! 
z`Tmf^$JNvw4Skp?tOm{-YO&%bCbcBj z1O;#Ot6{iH6iUzHz!@(HlBCr2uAq%L0{NpEwBXZ=tX3oy=2&S`UR@MbCORJxVZ*IE z^l=Q=ei*oH>-04Pbh2pCpw{~bQLZv$io5seSfriBz?>HHu~yvXHppS~E$uB+A8e~j zgsy;0Gv~?^J0=v1N+AGopRZC|2~%k`0 zXfh-$(p0yc6Vi=!S+x|bsiWitt*U0w$*n{bPp6Nm1nGw5XsvZ)hdFLUNMvvwbq!aY z*{K1|*WX`c&$)w$i3Ig-0TmLUZk>jyEZuW9y%?qQ(vr4wY#HCo^?4X^-BOR0$&TPb zZ29;X#bf#LC^Do@9FD0@997**HyoHtzrrGWQj1B1t>O%|q|d8# zAu8U%4=E?Tt$BCn@;n8$ay|1bO86US^53^Eo%ZZ~D*eLhV59e=Iv+?4 zA8Ul#{V)_0JlQFGGTQR>a`$m{gSHCPp>^kkNkx1;2EFzh6Y#F|G5Bc^J*N#{^rImm z!m{g$^;>_q!oPB?z)1BVlXAVu^5-1L${x1{lnr{)@*@FS9FBTUFT8tc0y~72)ZA&$ zyjpxu9wG+4qH0oKPh&pVO^~qARIh|tMXoTH=42B3PZz+Ns{KG4V1t%nu%A;l*4Z#XBrFo*LJ7xbA`Xe*oU+c#VWRfm75j%p9Yxu#S`7WF2J^fnXa$jXvvC@n<+?1nv;DUXZ?eGnF zF#>$YzwA}iuL==qOiNTmfJ=L55>FGzllgaS5dx?NpL?rH?2JE(6BD-J-&u3I!^+BHDwz8RIMkDUYl2uZUlIw-5zVS)? z%6ONl)J5eY>M>({2aL(=2z;*011(R17MpjYAD*!K1%o0|w)kwaHhZ9VCathYmZm+z zP4yU5$Z0!k!jxPV0mHdo#U!^Tm^l-?xsXH3=oUy96;} z@>b@WJZU{3BX8{-SLa^itfFAMOxZBNXr+eqc#)G)bN@Ta%NHgD_Rl`{|g=zAACOS@|k=icY>rza+mj}wbZoO0T2{s!Q<+GVG`yx)=)=^FV8<{wWIu;(}Lw-AKgI|5Q(S;z;q z%^`$9Z2h@PTBT$iclF-w(6T&gCLa>ps$bCX8z&@a24#4CW)O1{RC8GLuj)bVpHbrL z#=JRPM}>K7v}jaKI1W!(&Ox<^ctc>D!xKnR;y-w$tql<^&wS3 zqBVY_FLsu{V=5qRMM1N%k(9{h(>2V{R5i<~xP^c9gR{1Qu+j zVq92vfqu&(VAc*H?C(82tI6>(XApVBW83MmE6Aj8B(A4(2;3m$d50`462X1OuN4JEg0e5kF zpzdMDHc^jimGhf-K3Sg))4IjId=U|(K# zqq+DN(gEm+#Z46(=1^_%D1FYC;My>zCe|W7ks?tddW&&0Nio_3JVYj=Dl87a!&%@} zr=EOomhFvBtY8;>AKy<=|WGanO=E4zA#h50b z7DXtz&M=*EjXWGQgScN?sWiG&-<-b=PZf(*j6xpGni|RL{Gt89#un5Ph0M1)(eeQr z8kW~Gpmq^i#fpYaxf;=q4M?Sxe4H|5R2* zIhdDJB4PAnBE9LO>FU_r)lxGfl^}3ET@%jh@mbBJaINI9jnW}+?~n8z1U9j>D!8Lf zAtBFZE5rNV%tTQ(v26%Yh)aBzZ?xbNw?WBH8$cm*0vY93V2>@-U`85fKvlNH@CbQ- ziQxfTc3RPy?&>Mv55Wp&bvdNDdrpTV)5v<;`Y~1C_V;;kJ*d$v{>IFoJ{KaeqVOI9 zXsem++BtBY4ZeZ~!D2nIbv7;&b$Xr?8qX=XWkXVxP&_o-llnyraXsjotR@MewiVuG z7{5QNXF~v2(U)G%Ki;`+jH`)P&Mu72#mSsCFC`+4cs7mo+&_$CD)vp%QdKJ*la;M} z7DVJ;SI4ws{ZdoDfp3h$!&)g|=ew7pBO|d>Gdc)-$nzx$jL+MUgG4p7`pU1YaoXrwBL%gugRlOOuEm<8-T!ts 
zVU3IN=)l=RA^ooH3)`BTp{<~|i*4|B1S9r@R6_ZlQ`dylsugb3h7J;bA6&2v_}%}|xwnK7QO7PE#%UEu0mZsLKvAaMy<>#1+qXx) zK#j3mhoKNl8S&fwI!01{mH6oJJ3b3tYj{R8vZj6e2I|qk)K8jE$&uR!t=Ije%i18S zvtW~`zRhZ5nvqHAL@I$SF;2URj?1e-6_LS^TpZp9bbt6sfSIv{M&f#;3)zZx0;(-9 z^#b58@t6inv_d2$q6rue>6mmbjsP$|RV1W3*d>m+iITpm)J1Bs#q?jA6)7}7J>J}D zK-oTT#TuZrlmXpIHHAp|?hI$p)@O?08R3h*BB5l(0Gu;iUw#v=hLj=k<=7X6hU9;& zN$>-k+erkmQO0=w3$Vfvw8$z@DRxR_;Z@A`r*ne-4nGi}hbRo$VSh>{p^W&mEWjUtA&gI+aUx{z!%U1UC95iYX1sla=!rj z^3%Zq_TLU+i2}ZWjWYG$Z^acZhxO^uAo9N*q8SB1poAz@h=0V+|0@bENp;(|^PUO* zVYU4Ar65#*jnSgmR{!6h{NrHf?9U^oWN+L2p{@R}t`<52z_C!e?HT{u(>m4xkm6IX zKVg9X%Am32091yOIH2&~p4NI^Ad~&C?O4wLc8LFPLHuix{eKJMf7UUGGi2!9ZGUYo zm>e1q1K1|jk!qO-+|9)_V~fl{jIyfVcD7|5w4Er_N1?#KPmcy6P!PKOi+1aG7K)lB z4%ogCC_^uy4P!zKrCNXa?An8Fd^Q-#D#^O{3MB1ScjPYr5bGS3z@{K(Lsth|z)=l_ z21eEo@=F&$U&;Znl56{0D~}N9vw!SMU9cMw4h0Zi8zh5Ufo9*&lm{ZQuE5=+tOi7X@4}CVL`k68RmFR#;^HM1pq&BI4fS{_~HTl~}>v z)-(O;3u@hzK$LW3p(J<#FyDp1`3q0JGnQtTBQzHDxxcgzbI{iOm>{`+FVJ7k`~Wa|Mw%Af>8D>~tgWMiDD*!DC9l-$D?$ zYH0q7v2`g4Bh>zQis16hAZE||oj3KrE$e@+qD~C(#uFTdk^hmB1g@L(|AE^e z(9!t^Q{CvTrZBOoQnf}dKUJYv(1 zFTidqkB$EctfZF#2<{0|Pl!V1dJmwzkQ$7!>HT-0KDIjS`SqQ~l)TK@oGGbHkF_C5 zvQP>F(LoQjZY5b0s-&3j#8##~lzYML#V6JRNbXaZ0&T(1qwt8n(S=P%acZ|@@AdO0On4!|kAQ#{Y%;?IFdFhC<^p!};u!QT_W zpxwPZp}!(ZP!j7tfTGq@4YW&BQ9bt_zrAV%odH1c-Q_N?v!+@A4TEpC8>549SkaL^ zQ?ZA|w?1vq?Rl#<>@Pyr=f4)PW!Guh1`CD(OfFdzc?L=kIuNm-nJ4byyMQihdiOFv zE@0O^&+jOEo))#rGHri{uv%pY4S(bpL`(ofDVI+Hqj_v8lvLY$z}Z5$d$!Bb0{4yc zSsrK-fSXX{<9&Pp#!&0G{Mz)rs_0|DCBAL#$x>6~i?7h|# z;_24q{;{~OYqWH=Y25T-DVjR~1Y;R6F-+_RVkCr@PMA3J_P^lbFTz;pxG}X3KodWV zwYc{W{W8HQ<~o@9be-JLCF%EB19Wr3qLWk%E(`&0o*S_K8vwMw*$Op?h?95E!DQ$e zV5Z!6Oj=a0(2(3A1T5AG;{uV&w=N`X$;!1cSM(8-4NneJ@|528ANGR+w7VGo(#n=P z*NhX8F`IAtZ4>vpSR7InRond4Cbdj-7=FYE`mZr#5n1x-w)m@QpQ+R9eSSb3j;4!M zez4G24$<{R)twOf(ON=5|N4Bmu{d-v|H= z$ED)ySa9Fs5_2ZK_5XY=$dQ7ruf;)SABxK$fB5wW7)&ziZP{XN0&lzn(BS(<(YTLU zn5cn5IMeY22og1(Y+e^V;=X%^M@rIH2al!$B=XE99SXw2BZKePlrLfELoj67JPZtH$q{J-QGxI{ycs 
zAq&KG7lj@Hz(Z{#x8S^;5y;vPweIgeYQY4f=>}BEQGOp$GGsh&-D4B#1$}eTVyZQS z61I!2eme}84PE51p;D0-|N8+_+DpcymogWN*V#WoeMa+dYzbEyafZ0`U}94DSa+)t ztbp`nf)V`iPm_A#;gKRC)4>)9&P|AST$`R|(B{^iCIp&(BHNSDYz2s^bY}s8TJ)BO zAyRMy3#ie764mNe)IIf%$|p0RabfZO!^EBFXr-Gn09B$$wriW^j>2CflId5v3A0!h zqA>iKdpw`MSdXE5CVP}Q6bg^>ya2vB#I3i)uFS-P2=CAhH>Lh`t=8ranPiNhbpt~W zr1+W1TMxjzJ)hGkx}Jd376=R96J*Xg=BlJ<9*lv>^-rl7t_#9hx9@-2{~P7aQiSNo zQ0mKJUFtr-Yz%@rqiU2lhSDR{>_0km_E}+A?)n&G3a2ROwAc$@_ry;^;Xle$kVq4k z_7q`B#Ii06(B(o>JxNj~x1wqk@27J+f?0{xR}RMHsY`l3dI`YXpUyDY&S43*G2&Y#cir%xe zI8!zSGp1T@i?ta(xlFQMq66++I>MPSRgnu2wJC{{`V>o9_5pv0DmOBX{;XS715?RXUwDXCX8vsOF;`du>_u81?s6s?A$7)DU*t|NLS>luZ&^wzMol}9)A~(M45WVs z))O+q7`0*yA?EA}+3os@5o`OqgfHIdFCShes)y^{3Ry%EU0ZZ6t5oql>{$UQ zXImcCUoRUlPPCuMjHsk|ZJTUIB8W~=TNBy?S2HQI(9!{5rO0 ziFaD{O1FN=WYh8xH3N^j8Loi(^&?@q=<0308yEHQb@Hii3^;L(m%3z51-AtcR6Xn~ zm*H7rAyyc1JlfYGrtAFAuiw|wvQa@hn|wP)A{1Z4qhDvJ8aUSf#{&o|2XC+jCZT3E zk{{4K$&~O(LI;S!owt@kXn5T~Z+2B0wD$R91t1FjVY|L2ZwKSU;j^q>M)wa>K3>2w zJ~q)&8jMI{D5y*5T>`FAYcN)taBFOMQ-7xN!&N#OItM`TmAA+rFZv+QNQ`(MV*&!p595sveQt3ZkJ%4n$* z@ym-X5MhlWcdO;f8H{TVVhuz(GF==#@{nA&cgpW9%5c-~8yOioQ<6`$+7pJ+s@!(Y zNvlY1CN?I~t(#^*Oa$Y3_MKzIw4fO4xARrLkmvIf{6l~5VR(Yq zrYGBZEE8IhMF>&giEboPPl=@(blh->NrSGxi$}Q-@5ttIb3hP~1sMvALZs5$M+1GF z-=G8o%=L*;#|rvj+qaR|-tQSH4l%YnmGIdzosZ^;{B&JIBggtNGg(|RWvz>g4w(b7 z0bl(JN$NGiVU#V-RQ!uV^ik#=cO6+ej!U>I__mY%YyS>j5HR{8>QBoS~I##pA( zcB`w7g=@DeE7-WR`O(+cVH9V(oRl*=&3JC9oi=TjLJNUD zsmK`2w_+--G$jikNE|=nc5}mP-j2p#|?jsky+m4+w{6x&lsU#~LC$ zN>4FliqlC0Yb_0}a9Zg$WqnyE7$O$is-i>;d_-4OgTdg)w^B<58_@e+BgJg6kMI%C zI0ZxSUG_rVrR%RM$(Wt0$e7W|su=D%*9j~ewBWMmG-p-cc(eb7$>XeTPz=nqxWfwz zQaZ!u3Vnb^lWIT6`|IaYe-Wyej9?Ss{Q`WYH~9T34zArsPMqW!sGGYMe#W>TVR1=Z z5ke}O!AWja%5WD7LDKx@*r4G%>*y!u-A;5$u-0GesX+;3LZ>gtmezR zNk8_-cqgQ^n5}CZofQE`5tL7Rlj(C^l|hY|R2r>eOk@_F&R|WL{NZ3F0IK5OgPoDA z6E2UVB5Ae8GgBJT*sK{4_1;=qmDDhmjco+tIPmX~=@3^37i}>T^jiB1Mx`~2aHrfk zTU2?k@ToOC{@n#v%JV*3gKj?Q(Ku16fw?=4J^P;688oN)p1+Re-M^tJEfnvCITSJ5 
zb*t~l<@ZT^#HRDmH5p-KW;*D~xV}cg%vm7D^ge8W;jZY90;1%D?61dcG69yX(a&K{ zXOwcJhV9}@)+{%LsH`+zue@Z4Lz-}{yV*-qa0=ONe;riFq}*a;UaR?zw7R1SsK3F_LD{NW&bT z?-AVWj9;l!SPsAvezXj?y4W8}K+%hMMCg~08 zk;s#W=Cd-66z#6mVXL}5H6qq_h6xyu{Lfg`7GnMHWP1;W{k)EPNsPl(43%iP!AJdQ z`^9l*ivS4*juF_0;r*?)XH{8NV8*N5V;K$xWVlUCbCt0mD)~|3tusO)Gpy`FLXVW_ z;HVnC_YuM9Z@*J2*jsU20C`G~erT4}WkkTuQG>qU3tdRZvk$j@{n%(%^A)^)jT6Q{ zIyXWr++~{EO8Ty7j`Q+1k({-n$t(URk&^D0ClmAl+q}J9fo|geWZXSM7UKppUj=>5P-zF30 znl-VE$WwGKB}ay~zxwbo^uQ|bJJ6h=!57W>_rOW`!Rk z-mv%UQh=v}MD_-(9npwV^bzUZSEMQQ?gR5fBK+EwJGdyfcwh&GMV|$acrv&ia}y@X zfE*byAdnJE_@7;gVkUww3{e%J|lG~&Wo z;k2DxnY=oS5}-?vR>GxnXSKAY&S!^Tc zld1AK8QS!&1Tb0eKK(=@=Q0go&Ee9dAV-8GF5Pn&hA3&DyzazKWTi%%b~Rre(X(`G zt9CDvrSR_&G2Y);|DCjDR9%Ed!lI>8v7GUMnoz{_acWv#c z2*WeYO+nlp!?XLohsi?8r9H+%aOukmcz;_uq0FaHS$dngbp``0 z8}klvTUS`RfwVT>5ut#!(HlA{jYC;_y)|76i!@=zC6#Zy>N)ya59PBoY64E-4ew+xr+%&k@ukT0U~L3H zo-81!=JyUuY|QMWmc|BuP>Neud+E6$p@bc#RN5c+Br*L*65Azh8oflqHAWA?VyuXI zS`EaL%!w*WoT^nut~rYlE4PazFH^T$=1|u&QcqXRA3wQSdgJ zvFtCUbh4(PBH<#4j$_MEkJaOBRCT|_bEC!L=k!D^KCHpAD5m)mB8c6e`k`hN-)U7c zB~+06w?3epF`72}UUWAKGmOCl`}AcCUTMf2K^BCu4{6%G?RW!AdQc@~@)mJSmiyie zwu8LrhJbqt36!}AiP-I1!G?xxH}Gugc}0n=RV3IRuG*?>=K>q_QZV>AZB**T$dx9% zB`E#+C5qkn2W*wVt0)+GbwVGkp8Rti!Xzk-79k<1J-jP1CyK7KiMTlaz-AtH6RB@F zQQE-L?G|A+j6AODxifCJPFUlIQRbGfGQx6OY}k5&qRQq#$)CeY@qtq>g`J}Hu9?~) z-fE3|g;XC34xSOdny)L~Yjw7dd!XR=Z&hdIvpHFL?xb;BDONE8$xz>QDkDCL!p`$( zuj$v-dM7?Y8h7+lZ_N7eMdIT=dJPYg>8el;JZ8HWOPs~u;uYyW4|_Q+4o+mHGdl5~ zsw|TQA&5}0***#Kj0T%GsP>7R{Jno09GH$g4Xu<%nIRu{wwcE@dfwI?L1$^RQ(JW$ z$w_H*)%5H8FRq6R(x1>1x_Kt5$QL^WqVG!+F$unVoyYqG3GWrFtG}?@wuqY}7h$3_ zh}!bfQL}V;W>y@M?cqizXK{$=CmgNUpuCQ)j&hkbaHqmW`O1=XbEHu;CWvIfQ$8jl z$>yowN85Xs5=0>%R9Ns&JMz^=G>NxNjo3%zsbY6(BfnS;6_jVz;y1_oooX;$D_*hj z8MxyGM3a=%!U?xU5J6<~ujO_44Z=Z`%NnAbwO3_aGug}Lj? 
z)9sfkm9X?C3Bu{)0O{a+)d^9X0^e0&5Tl*tWkfmR{k3Kbi=y)A*qF+1fE2sywV3_G z028m@8%;7?h&&5NQEgK{;IUIOnO+t~pTG{5f2^(hLEHsbA`_e1nWe5^|By&q5cRcR zk0vIZ2b;^-1K)8D^s+pW5q_S~#Tz9<&zsHY`S;ekG#Iek#?1)A)@cI}mVEbF2zm5@>?NFS5X33+EktB6h{jAaK)k=K@U_xYPk`dxuG23>alstQ) zMfD{4L#9XOjI&4&ju80&KlR%~QI>3WLpAYWBoCdw8$Nx;x3k;~*SnuZ)+x}Y$*@Wz zDM4-5LC$}~L^;!#A!(RPeTPsfN4JV-86JLP_@ntj#p4)H^o^oVuR?c5+Vd>5suq@n z93%xKWn}RaiA50D>8L>hW`Q_&5Mt&0#hDW)M&*Pe{1>JcW^}#Hr8OK5f?6(ETS4lX z$HZeC_v)CL$!%967I&cecZYBwQYS^eWvg?O9J8FFD%Wi;0#g#|2=241R<&Ku?FD4; z)8mw#><>JFcc5MKJ-!{zj4?pz#3@VOK0r#j%CAp%If^Q*-iU?GBRn ziy^O+ulbQ<>Bk38iZ$1;(gBVO`{Sbzc$?I^0+aw z&_Pj^Z>~?jY5i3i9Qj;df#etPMIAsGg~ke%99V{`qO2O$aX#IgOB<7F{mSb?5x9hV)mq;Q8MU1nQr)p#Wr0-bS8?d)0K zVEjJD7I(a#z2~aWV--wSUjFRh4-hPS5aFzN+OHh(pI8hV0{~!fC)aI?d()ngA_ZuS z1ne-8+lU7y!N<)BRO9QuRQPWQQi{BtJ7(8X@-A$16uKb!10l+INWf>+<1pIWSY_kO z+~SvvLR|tlf$*r3%3@;C`6LgL`klx$lR!$Nphq;^MKl1+33VEp`NL6(+P;Y2xe8NX z^`xVFm1>h1j;`DzRaPTJ$Fe+vFLvpPPl4)tjl`?4bIcId*}I1Gl&l|W{vNZjRgP24 z#k?u-<9c!E~c9ZA|%ql$1rT)Nd?~p<`V{xYn z%Vl>!3*}zuEV+amDpOlpy*s7fm~N|#jNfM!#x?6CU%*IBClS~+d~Ne<>xY`+!9KsD zer_4s69#S9#p)$?dW9cr!zzw2dGe!ht+Dm#PI@UO@-@g5st^!Me;UzXRplW4GWs|Vb_!fGy zAim`sXqYiE)tLeG=7;fS*b_u=G384pS9_GNZk3;UtIMu;&kn(;c6hV}`Gw-{2-?1o zu)by6OWME!CPd>!eD6`n{UXGe9E$d-%NMb-iP;dust16LxV3a{4Nik>iDBhV?aa7w zVmmoLX*a$>&$aqQM0xl3qNm?DBG)K!?Y+sQ(~&I%mPSHP8T5#5_X=nhNsryi#tV|;z}UfYBuf6_0CZednjyR zr`!qBTU6umQip2ixW%c^NK6iBEzMVr$z`>WggwNf^Yt?f)$Ulrl~8c2>lf-fHtzCT znRzFQ*$7ksdtzw7&U6{QTip4d{D9_l761oJ&UENsW~1;2xbOvyr;9`5eqfB=vtg%J zQB~2k4vZIbZ=n?Iwf9Lqj0I#?BhJWuvG|d$92uTl2HkzBu9iEM6Ta*@gQ|$VZU@VtD&+LB@>C0_2a}jCr>Ucid2BSk(UooG8lBTd^ z6C+V`GA(x;*NJ8oQ$6giG)tB-^@^L=9ZgA{T^0R=Fddhhb5eM%l1>GCw)X_B#IAD_ zdeFBJxOjK$1ex)eYG z?PiQ9L~u|uHQ|ScaL0Sz)W}1*gP zg|5#_?@VpScN>aS0;b%UceRNrf`r-@yLg2XH;%?tCf-IZ`tFT zwe&@p+|_rB&5rC_6Q1-`sk*wajj}Atc4`5}lE6rhkx6LUa8gC(wxSD{bP}}<_x?zA z(e3-VG%E7Fi|0%HhZ>w+H85J8b1r(8Lmt;kN;?xl^Q4_8BeHac5r}VAgqgWJv})ec zQKB9Se5AEii%YKxqw;T!$JBGMI54(3w$+$5W~2s{icjFo#N6R)6N1e>+tkll3bwP5 
zxNFyY5NA-sjAviV_O8t$ZBG!!{Bzo#XT*u}ra3ZSXfuq@?7_0))+l+|T+|_a z?@Mfdjd366q7x4-+a^Nb(3&&h^LO{Cez5inz8z2dnc`(+n%L9D*J*R--u%x7uqBKX zS@v+z9%ek#n!t$0@nBNr&ex-@RR=VNRweDmc$AN2Lh3{(B{v(3u48F^96ItHZw3+pYvdJHvV4P>uCecqMz3cthLK?QwYX(H4edus zi(K9i5DT)37D4gTucKCYz6e(%7QB`sxJ@M()xqadXL}rON7-q~bJ&b_4ajtmeQD7k zbQblxVv&iNJ+e!xfMsIDSVd$z1-S}N?En8T^DELO*vJ* zhQN)CHyxlj?`;e^Q5(%>eu{FG8btMC_YI)qQ_Ql+C+v1!Qi}B8=a<- zz2=GyytM?2b2o3Z@#IFv&8(77Z5{&XQyqboR6B4Aj3&4{I!E3Rjng2h!&N`~fU|;{z-3^;= z1c6O=_olnMkp^j`Yt!Am1wk5Vq`OlT=`KkX1pU3-d+xpGp7Xo^uw1ZQZ_K1#o6EX#^gdtctdG-hgPziZ_ZPELerve`O zANLReDDr+eC^-Kgn&L1I6=3FF=l?+T{~=}lha&ISvnW~o%e>J)-Pb?=(_$STP~^3# zIuHM^OJvU|&An;4t z9cgs}8m|NmB6hcAa}S&gV+vc}{QEONouA)er~iin;C~vx1Ldw58L&l=9x=VL{_pp@ zTL8Q*K{tyUW;YD2iKD!Hxyt#9;w2BfP(Kfcb91MN_r+y}U|uAdJe0aYMXuw1k3e_!Q4<%)p5 zwNU|R(If1|LYC?+g*?@1D~dX&y9iLzrFr{T+$*w zjMfhzH9be^JxeR^Kz{+3{tTd-zyL5lz$qaNI2x{{D#4*{>H;(nK<2|M{9-_d#t+Ox zfS~aQFkmxRKO0?u3h;=2S`(j93CMzg0veAIcf3=RE{Q)^$pYWJ2`m9rcOk(1srIs% zmEHnEAKQVby+6Pfj4GfUzU{#Y+Cq4ieyZF2xz7Id2v}R6J8Bky?bBW_MS1q2*8^z~ z{~*&OE>(>^vnAhj?@~SknfB7#p8iOM0lQTz&yu>OCqT!j0Khl);Gdn9BmkN|V690d zeHaivUFNNJ!>H_7#OYO<{KrR?h4NRcl2hQqVu26!7TDH`rSBYlu8rwPr1dN_2jG?sv|@$Si1Fvd?CGV6CO|v>kTHc%cVf z2Iy;Cs_aXiBuxuROB&j=M#ShpB|o}SC=peCGE-lGeGzHO#J9kG83J>q~Qga zFdu;Qf+T6xSTZ|OH0VbZoL7JI-FkDGgQ&*a71#k|k*F!Ab+7^hYs>?j^&1UJuFI>u zZ(CGaWZyMx%z)HIY?gNgo)dQf=msE7)kqk>iPe?3dscLAMc}BxMZHJ{ica9x`Wg~` z13IHMVC(d>QN9%CyBi_J{!b1!e+;lj2A9(H8ETECevpx8JHQDJ-vBIBT|jl(hRLHR zYOF=MF>o^$_;@vHpLcuMBmfH)^po!ZHW2&-VSk^QBr`L~n1C|#PX-r2Fyg&dWdyeR z_UN9abb#Y%MwvK+!1m_(xdCuyJ1{<6jhEuT9emb?F&DVQ)4qIHC2oZ-aU1)rFTEzB z?c()OpxsZGXABq|Or?t(`U^O?wfr^`@(4pJCf0Cw$B-volgIGs?0j*G0+!g3>XC)w&@~h5iWqz>5Fbw+uXA} z_I!rU!!y23yZ28LN8EeU#JpdtA{1gk&@V54wc`!f`GfH-wX6R#f~zk86-WA6mNu^e zSWx+0Tc@2hqo*`uNy3<9(AS;-@xhK&X%oOgkxu^|m|N(IhMC$|6>z!b38;X2`@KT+ z(0>ZzavNjc4T85ASSXeHr1&(Cjv^%eEwR;x_NEDPVa>5|M16pV4EnSRcuR#d1YU_3 zy|Ww2E{m@K>z!e=&xBq^P)BwkX5rmmW-!cm`_-M4quk;}b=|f1)k|jiDt#}2z{U-8>r%olf39cZSXN1e; 
zcE0N-ogKfe@C3V2r$R*&hf3KYFgw1d5~Tu8`?dN(d;1bOFwU4LOEPz4?KAG|gou+I z=cak>t?L0$L2T}w{nA&<7)q_Q!KoC^vRJ077u{m`XF$~sWEKENBIbZ3(zdn;G-aK5 zIVCmNlFmM-4jkg+{VAKLnYhjPf{JWsMMB4fL>k%_?*YmDGqN7((3%{4W-a=fzC62a zMERQ#(Sx{{*ANByV7d+$iJ?vEb4)1`4>~mL2FMrsm0{ASdLd5<#7^=7qHE?0AP3~xii}}S{pW<}Z-86+`N}z?L*+MbdT$D~ z=p3;V+F2jDspDhUU;GjQ&i5$+Blh$ncLm}Decu_*#zSlZ-VUI{h@t1~_=69!sny}? zF!Kd+eu^}~Z{A11#y1U^pzp}0*mk9p$d2&RvbUdw$$+6iU4BTa0niJlA#nqRBPMo- zqDK%g{=8b`96ZgE#Loe+`Zz*+DWV|OQ5Mz_McvJyQAW7htA&9tY8tL^2V@y`&b!j6b@iQMX43AV7hf0Y06cA3^i}sBEzUy;jIX|EYjZTW@xceZ}4l03ZpI{_*CJx9hj^2M}?j?}^Qm0T_TC`nv2w z+2;(l)9^&6pzlupgBC(KO>fbV4x%c+;{qz-5f4@{q7}5*D`*t%ZzYgy_uyAipRYYhBuvh`qbt_BS)XzpG$D4o;?$ z&O6sgm(yV`BVfe-x?FfU&w-|{_zJ62Ipd?*3ux6gG zQFDoqn|l_M$7upu=mY3#*bHq}(TsH1ke74U5QFVJGkeHS zBtjLlLWa3Z*z|FqD)xzu-HM08S%)%%x6muC*#pWIxoYbgozsY?$W|hpy-kM?aE)TJ!Za>0aQcZnE=ql zZ0v-BIA|RzocyN8c*(hHjzCqNA~{%A)nqnhg{^2Q|pt{J)v-2-N3^mjDwfKLG!(mR2Zh6^yG z(+X`IcRb`ciE891l$W#bVbBt`-rcMjqMWbJG*9-60)fW9BVqW( z=e_V9SLxgiMXw|JYHWIA;&$&_56d6iGu}sTww41}a8!vo+QXSMbE{#ZT*rHuCW53* zTXX(n6W#DSRQQy?)?OW}t15L5HakRPD%1xz;TI0p_3)P83Y#bl9Rb^$rtBVZ%Lo}r zn6E0{&2zkT zW3;vC)Lyj@98eg)S-HUL9n12Xjt)Vc8`X|UsMJU_g`{Y^TwgQZv0og3vT7a)u9lh3&r8GCPHPoaw7Tya za6|3z*8Otgv2%BBkcYLRqGVYBv&mbXy2gI^>Cl+U$Ep?`ihcV|c-EE<$-r4WCo z;nRqo;iN$SRK$}AnoYg}mzq8AmR{B(iT!N7_H^4I+tl;I*9K|vsGmMg4r z{>Y$uWpI#2A_TG})=7cc2%GuYS?;8ip70dkW4gNP04sX8r~9`=qg8fHIoqskc4Not z+D+5gu!PLsrKV8q#?l+Ju@7bYO+OJWx`?QLRjEmq$JGPxfQ5s^W%6mX`ISj9Fqx>{ zetz~79RFA==7CTrSu&4XcmiVJVHjQJwME8Ij?A0N?51ePH5U6%5A>o>9~gTqIhp;j z8i$oim245D5TVVsx zip1(QYGrW<>r+j&m7hp$-v2ci{00b)VC-sgf$~70`>#uHhfI^g3XPdBI5`ro02TN( zFf~>-+3w6)Xd;EI4K@#6ibqR(152&ZiZJN1=g~}aWdzFfGodjvwfKce{gbg2?LIA zsuwfv*7SU3i>X}5hx`2LWkL=2i7)d`vuoPFW3N{**!Jd#t%w`~xdXRhI6*JE*Sx^W ztEEgbKhC!rB~u^Aou)MNZPmb;DSN zKaI31C+D)>KdKSV<(Gq zS#~evCvaGw8nci!7|3XgO>uq5nx~KKx!3dFtA`b3qLbFh*&Naa4_hy!)9nML4;<#Mlk|F>hnme-^RC$=Qv;u9(+@$& zh&J^s_D}cBoU=u*+`Snug2(J_?@Bv?LVFHf^m9wCK4LU}^2_Jo$Po@VN$*MPWU(ym 
z?(i!_PvA^bZd1l*TKCUy(k}6b`Q#9wWUm*cZa4BJCrIXuII}APF_-r-{=r1 z@VjE(sUjCBH~I5+2tfq0F@d=cWSU&%~stV$m!LPg(Ye8@2ysc=gP@e`!di&n?QD}I7 zA(V;Ms;@EyhQ;W$yN3dqO411{lA$IvKeV0qvehhN9zVqfRyn8sP7!_B+rY=S0M%C9DgwLyP#YD)3HU*Ma!di?6AtQ`_zC*=8Br3Qy#kVTdU`J zw6e_c#7`VfaPD!51Pnal1F?@l1VmCGCR{u6jZlOT(;Z7C&QsXaD3D@R9Z)&ZH~no; z1^?IW#>1d(#vwpRB3gAjY+CM8j-=l3Ogq3eE+7ne@AAXlbp(K>LLe8lZ${EvA;;#1a5E?1xcD#I$f9eIb>8S4TfErY?iIf# zh(=-RMYH9xW#2Gk3&(g@C1UMoD|lWm+)cC^W9~$fs4J0KIe<4iM}&tM>Tp9hA>C=a zHFFXD#@_pf-|{=t2HvTa(iW4|`9;6q0keS{GQLkfThWYhCzp~_jB`z1*LK4nxa==m zYeOVm{heZqctLL7si=iQBD(d~*vFSsM*3Vzxy)^TYAgv~3aFA3WGj$5@_>f@v!0VO z?+=|C`hDSY7c+7|LP))P|H4qbL(acFIYY*lxT&x_8N(}f7c!Za@x(E<%DExzFCDV~ z(tO+6o=j}v6R=5QjBS`JqWL*P6h{Tc{A#yhyxD;aPawFd&SGb?NnLNp^K*Ch4{j5C zUjw(&KVGPje$(UH+qpQ7V*=c@{pml$AG+|jg@zCNuH$20#%kYbl6$2bwQ$7zY$9#X zze!%VhFIW?ce}w_Cg>5(y%!{Sb_#bc2XVeXR!LgOGmFbE6G7BGRCapD{qh~bFutIV zvn+xqwU_Lhe01)Id1-e*O5)NrZMs0$bNI*Ytm3;rTl1q4h^=-NA6RlKaCG&mm@(El zPKj){+xzJ`Q<7;qw$~M~W3Gr6=Q`yUH(@9dqkz)cHCwqBix3H+2Pi+;cFEXJF z9%HMc>cJ=4+_1Zcu~ameb$ebkVj#K2?tr$Tx*#k_c~UOnb@vg^>*8kdA#g8CSHaIOM;EHs~WqWs{g+ToC8@$lhXyqD3rY-WYcf`okqD_)_7Gy<(ps zCyuDsf_4MoS?j&1x#pAiHC7k>g>m+Rop^sl)30_XA(PiM3~+GtguVQuehTO96<>7` z3-jF^bU?Xp1MwZx^k;s9DpVoQwRHHO{O}o@fyWuSia|2vNuptL&G<;N6%el8-Ql)3 z>!iik-*S>u?zD7+tPnA0&8p>V$FTDg1S|Mf4Xm+xk#Dw=rcjI@fjv$qBa|D&ydt-6 zn8sJq!|idcNR~$u9K|>X#i1czvL`%G8R^sDNg5p<0)Ev!c^Q!B8Ae`pWL}pHc*SYb zW6E=3+tOg8W_|*+bR^kq319VQR&d~=iliNn(@n5#L#t3+k9fKnIySjbWmlF^RbEHq z)v=5J9f@E~9y8hX(;|5wjqzYD4St*On;_IUE68tBh@E4jp{d$6`)%>iSq-{(Y$)MG@~2SN6BQ6eyJ@Q&i4 zVOJ-^r*OLxrUI`lJ>!lH9Kc3t*z{cHOasN52_7)7(-wAQYW zdySZb@yeAu4L5V&!01S7!C9bcK^Wtn9QV#N*2ma)6mH!fVgZ=u5OXUVwRDzO@)xLZ zA3vRBoag6$be_h18v*NgqQ6iDdEO*`>vJ$H3bayWAAI93@JVFRv@rnniN$qdeMc!5 z8{}qQ@)Io+98{^AxVd_c$zGo}Ng#9k2x$t$!s;3{P z%#VI*_U?5%I#D6(+i6-S?4LWAhq>ur;gSnQ@3lij zF6Yy*Ll#z;{W|Q__9!UqCad2x8bVJO*3s#&PFwFw?RsiB*Y9_PWKEbH!)U~@;c@f= zuPAwfEVVI}xS~&ntSfegBjPLW{+_j$4y{>PD!f5Kyon!4!m*Ui^hjdC zZx{Ij5Z{v4FH4r!jLJAZR{Dv0Ud|2QAb+v#-n~gpt`Vt-KA7DT!@^dL|0ZzRo>%6U 
zq8KxJ=BtmczPnCbo|41+oHvb^#v2&CcVx8Kn-5!sR9DS(+Nu4VYn)h;=^a4vRHDe=`Dh*tyIpFiy43z7_#jhAFgEw6t;fpOdeq(cCy&}yC%J3F_OR< zH_i$rom6^gnZ)HhKgj%`AET0XY&vh+aoHhy9{|esn!Jik*-uiJJ^aAl+vPcKO}O|d zf}V}3E~;5^?GakB=yKvl3SumwAx5PozBI zO|hZ1V~tF0;b-?D_D(hWLf9*)oni4Z>{w!*fSxpJHY7Gr$*0T_oa*^Y`f~q?y8Oiq zJhSkP#{2Y++r5&4dzY9nhHw|z!yiM7LcRk2oJV;sH(fkRVd0#gOTDft+=m-c?w zI|?$JzrW2%HrU@qc`e1fmziS{P^Q096J%8KmqvFqrvQ0(@benoW*rIPs>PUkWWCNO z5NyJz#T#dT;LNSyILNu&%vPu??J$nFSCSxMSVn`4F2EltGq?l-5}@Q1=b6i(wYtt5 zl2#|Hy$9+sG=)mdFL&e0?AZA~7kXn(r!urG2L^GcBT$mdX@3kG)=KTlMTlUlo7;xg zw1~u2$oE+ccnS6hG?SKvBWB7!Q{tM)Y(o_;EaU6?OSBk+|k+R zMAUkzfNzBaMeRF;pbiCCVe*-1h*tl&b&qEdqr$_c!A|!8d zTa77GmT3z@ktLbwg4|;w6W_LG@m~`)_+wr7=ZG-bJJMWeWLkKPJ!J@7`H=v|9o^nW z&sMrKdE!dKzI4obHPS%O_(RKftj5NsrgP+itkyceECkA*^i0M>l@lD9awF|VXtUO& zoX3p?>g-+migI-AL$ zOz5>ci)y;S?S#*&&0EHaXFLw?t<)*$&Z#j_Q}uHD2G5M!`|%P}Sw-+H+nkks<++}_ z!P{^5+9sVq-Mk)n{8?z7@wY0d8+`$R?JMI=MW#5UJKi!+CEO!jc^kF3jtCL!n=n;9 zz58baIl6T>_gf>VVoXLHsp}ulu|gZpQRJ@D61 zvq&M6gjT6wWO(j&$Kp^8-!Xj-EL0u4_gHtpI?eDw<9*P<)_nxs{u&qmHf|&SpMsql z(}Mj8B_y68nydr4^>4dgCIwD^+R&E4t?ib(gRjA6^f&Bh-=g=?o})ONpo zv5WL~2)9)P$*A6%{yya5zXiM-X3<~b52P(gxl^sVFm|ZcJ;~i1Rt(R4v4xEzE2idE zzDAubhZ-o_!7v3;7yV-+qGMx{-4Cm41!EVQBdiT3grFR1n)&+tC&T3i_XT0htDHZa zD(GL81Q4+od0Fx8W56FsRwT8~OeL2X%7)fYw0Ij%rdJ3x*Rx#I9!g6TDWh&6Ucq>%q)V!_MdW-guuyRMds$oRVNVHihB3GD|b z|HiQP)MHlD#22s1P`XONTNPxmW~Ws4naH5%>`znX{mx*}o{i^k2ybz`*#B+xTdwWv zT_YH2H`R>?_rhT%UF(u^lB%$~I%BEV2z-J~poo|f1i1;<}DJJa6D-=j`2FX(WU!&;0)B^M;`c|YUB7ybRzbMvCP zhK1tNlmXZ2h_S>Q4k<)dxdKfw)9M8pBjK@P`bI?rp(9<-&pCw1IYid@62){nIk^lv z8r(eZ&wsy7UD$Ze^t_qn$Y1a2{yE;9%VU2c@+7i3b(@!Cc39>(hIDFk!kBYfS2^5u z%Byvddo}6M`J2@0Dpg(FkJgg2`%?OH^5>Gb<6A~b7ZZu{hO8v0qjzGC^Nzag?F*=( z$1l48GCg7FBxorI7TLWe_<0UA7WnY7HaEIh*xE*Ru5W8>a}NJB=WBLiRje)3yGFT& zvssfI`C@f+nVkD?oA>PjueS)j3AJ|wc~EvCe+A!7qZGBpCFD7bwOh_W+2oh(94fx= zRIPgt$2)1BNpF+YV4+F6j0 z_zJ&64c6-FVC}ag1$Hrh<6xseMdV#fGDA4#8hJt=b)kiz$^-d$SSohb!k)(NX9NK; z^JSFm&hnSw*-}IV%au=!;}$aem?CZ(jA+h@c6XQD#8w5o9N+YKVz4o6&$tL1(UQ`} 
zKvPoYalWtUsq25WAk_+c!kvCjmi?|t%j=!Q$B4l3f}D1oNw*3~)5B3{>c>%c%1|kU z+?v%PR#n(1nent>3E8Vwy&nTZXNGy8s8jJh__m?K6=|QsrIXu zO4_Bu8CZtjx7ETf{|Fz6`iJF};vlVAr))1u+M?j}KG6g;NMi5C4TN3(b4lZ)cNxL~Z~xS-@L2xN zOHA3sG$i5O93FEM)Q_umB3NvE5wWzKibg`0VRvZZFZ4RRzweP4I-LmS9mB5o<^EEG zB3)Jr3rSTzM8`(uI#cmss0I8wf+=tDCC-!@X$c4MVh6N0V!xe$84!7bTm4BEqm;ZLCs?JK=7w$YI)#su)-fb4MW3pB( zlvbaKB^#0QeWWSf<=_K-A#E7X7Wo0O8a#s3OHV^kWH(lxYLX=ek|6tWufvAQ%L%*o zQyTmbD9AG)3ff8<5Fw%>JE9YciBDCZxHK)(Bn=U;$W*=2s?hk`X7@6xtP>8t>K+wZ z4RDp%f*A69Ub)Bfx(lB-_(Wu7W`j{Ce{pcA%2V1&%c$iAK!rZ|B*|(x;uJ@Zi!$p) zI~0$SnydODU~Dv{ri}%`z^sRjFeJtPnNpVktx4ptRe@<3iIs%LQi-%E*!yUXSNHiN zl+^N~ms?asN$8X}a?DSw-*FE6k54d;h?ESq>-Ev{IQyXRczNh9$-ThF;Alx%DN$68 zM;EU(={GT+=oAx%>4JRX^df=CJIRe+eTox3_GslKXJzC`hK%!FN7xj_z*5*6rYWhj z8t9hXx-vsHQ1AQH(yp~30%V{C84fP6c(M~MORBLWKH~8u4CzSNgAUCLsh7-d&!jp~ zSkMk1qNRjso-88~mEBNh5sjDk2EyUYe?aGm+7E%&E<2({$^@fQ%w5mq*~+kcq#F8u zmxzjW5hHwcEzSg)ju*yyr}zXt$}^aQl;+$<`r|CptmI~xt{+Rr5Y05kdri+-EN1+4 z!d|>GE99G$!b=+>V)_ZX9mJDcdYy$1g_~BENp+8}Xf|XtGq-!GRO~oeDkLjpJ;v$7 z8b7549~2A2*6_-izJx0;!A)1Df{$ZkbIi%%k)|$_JQ@8-v@p{4ifN@(f9lu!JMMTn zF!hwRAm(2G6gx*O<)C$Hod|LoH`l7{WHg0ixL6^r)BK~K4XcWBj?5lXm$!DV`*pI9 zJu|fq9z{G2CfQ!>d5S;hXZqwMBL;tzc^GV7=%Ea|*#weoC(|sR{HmSPAk0}==}f4) zu6jZVLaPdMsfojW_>$s#Y{`p>^=?HI8SAKtfT}Yg1u2g~Oj7lBp*!aLW3?(T%9v`c z3+)~q7MmT4hiIk{6D#S9feJdcQy#a)(M}3G`X@-Olj)_YA1b0 z6y3AqZHyT?YZ_oC>)oPfl~W&)G$9D=@38iNMxNqgxQZQLbes1G!+M1&Ef z4B;5X%t&A7&S+MX2Jzr6KN+BeW{r}4#AE;6xRX^Ilvc%%x%9#Vl0DPH(Gp-scqEG4 zs&Jj;lHn#N1y-aXaB(djXVr|k2cE!i7;oYgPuRE&D!&}LUja*+$57~fNtN0(zE#>8 zWa9LqOY!y6*my;*k2mbV1_W7GSjbwtN{7V^y-T*8=9?bJ@Wr;JSLUEVQDfE^<<0Nl z#-hD_97C+Ct2Urt*)Vzsa$G6v=r41uh?r*A%}e8K(sp;R7U`tB0qs$-nDCGfB*~!pzg%aDGDKxxOSqphe4YB^Ug%? 
zMHuBy?3>YrZ^12HkbRpUGMrO5=lIwFh0qrdjhmtzoc;#aQVGlkU|D_`enfKk#m3SS zA40)PNUKD+cLhY9CNI__(|;HYO!elk89JSGSzF^0N_@b}JS=<}jHV{$aVu=2nZ;;Jd5Sgo+2Q!WPj4$0XT={>sya^bD_ z@a21#%}n`uMc)PY`21Y`t3k2-a9*C~UufI6!hJ~gJ;bsQT{{~z>e(G*rt0HVuX!q& zesmLUWh-^X_g1<}^o2)u3{0iNK*hIOx()JYK=@*B~V*hhwkD-UaN#bn|h7-lqwe0Z`vt6i$sA zcN@sKX)C-bj3b;mGDfNl5kJADX{BpUMC0UxgN&9#yj#UrMkVM!(JSO`*RI$g{`A%c z!dC^0VFnHrg=~c!5 zno<(JFOeKznR{V=^gHKR_|0d)D_0iFsxD$Ml^bW*_F$V-h^BrjHzq4u{_q0TC(blV#D2^XcIg$fz}F8e zpUoqW2Hw!3W-JHQW-!1@p^3)h&n3x*=Co@h?u~wHzluyCg~tyrvp_+?t(qVtPRHD! z8;(WaV0Q$MCu}gshb^EX4Q<|@YUJhUpr?Nkm^Y~i;jb(aGLdlbqpgLm4%>mlgI)uB zHiyEFXVq#MOvC7i_)2SXd|#JHen6;|EE}Nju-@{T4#CVG#!Mm^#4r^khk8jXk(c_b zmd8ZK0=si+lC?;~Inw@f)s$#z7%lTK%@nLVE?!6AZqjhRc$og?z~4`)=iPjzT`#UY z7R}?r*9J#MLKbU<_C5|D^ErsdQep~J+0325ENTU>kcf5jsJ#}Bi2neKkU9ZTKC6Ti zWD=e#67_i{bw(n<_N$`r1gt?{B~LUd7Blt`_IhrSZE0X-Hk?exV2rgmZ#2Z>_e_uZL<0VJ){=(J`~y3 zr$%0E#<6XR6}83#>#bV*<7Xd|gp{!od8bBx=khZ8d*Pu5UF`fud}1K8OlPN7;8E$X z%9DMGy7(bTR@X&Q=7ZNfu2H|RQWH<(s-}wbc^wb;b!;c&&yU<2^IsQVg8xMKV6!J! zNtj=ztD|-+uI;C%wpHhcGO-9YuV4lIw8=t}hTV)x8Jud?ybPAXvMlS53CGVL9b6W} zOe*N1x|t*cLku)`Vnj@0W_kHh#f^= z>q^I3GsJD{-2Ec$(NMToQ@dcRzEH@n4s(mEdPJn~R=Za&=;d%`f&(St)}`zchK@z% zilxVW=IbN2_hVLq)mR{If*4WuCLB6H8`qhRwnXg0G8PK?SttH@lA6-4l zV$)1uABSaJJ0}gADmpI0u`iV!lr~2{|lc20dc~VKQ z4+yXjI0Wc5YrWmKImp}Z$#hRV%5tiCEFD!>eAjGAm=?Lo2;XEr5ErVF%4jQhUrd2* z<8Giin=Qk(0sH}&O!h6XBSF$CoJKm^_o^ed4`cf2mt*m-Bv|GG-PMKTq$HdHaULqQ ziu(f9vgFGItdKv(NC_@ z)_>&nYK~)f3;2S+nl;|289|JOKn?_2JPPP(PGFDJd@DHt%Q6|_%yS&`_kz4I99ud3o5NBqA->V*h%j-^`&O3_g)W?-0cxl{E>Op)blY6Z>!Tp97F(5*a zP!FehC3-C*1Qw9y6hOi32l7oVUc%GCJ<`dB=*BlD{nwO0G1;i-wpe>XzRjsDP!KCk z9Tmd6YghI67_MG3w@}Hkw>LFa@JneyDh#fqKdmIBr_R z!iKq!q3EC>l)`1;aJhkM?z@% zV;@(r99!Sm&uaud%T5Y8g2h)|&@7XL_?SQSe1Bim)$3>c zpI;A&lIkF|wQ9wYKOnny-|;$#8|&TLvnlr0r}SV+Y+rG4h24vi(xtNMdF3C0VU?AF z-x=PykC1td-)S2y7hpQ_CPd0NMl>%3x{OYhJMJ~7pdIiF0v=%vu{XOtwkB1))+|Y! 
zYUSvs>Uq~dQ9MC8k)JA3|6_p59bnWp1#gu^U|UH}#H_LB=cRc7Na~b|L2MEBiyZ86 z3(I5!2B%_Ovbg+A7vb;*UaL5YkHt392lV_(a!(8wJF|i;zZE}V{kSbE?UlDDbWK^1 z&v%eaqc{i!mB(FpG~oPXL*~&Hy){9^cb2|Q3S$f)q@ZER4BSAW84hI`*P$svQe=`P zk)qh=W_Oflqrs;ne#NJC6L`8<?siob*twZk&zP>?&Q!VW!0CjzEdc-`JI; zZL1?TU{i#RXlVkW?eVB`<6;#lKJ@Z*pAuB`8uTEAvF82i;fT)qp1M=Bq#^K^H^m|; zf@Bcx^ahF$`JTPHiVa}?Vwx%(y!NvpML+Eaq&}~FWYq2}NWK?rcZu+~&Yb7BuawYZ zp}|s$wk=jH+{lTW~(q z&?a~CfyjMu%TwjSEPmi}0SqROj7~c#m=o)RW%nTU%l=4iDoDV5z@ERFn@F z*C7x2U?*-!SdX9Em#;M!RFSBgoshhX&Xbr>9SXA0h{e#tghj9zA=~S){V*9d>SxcD zi5+;^+dQ1;19HWcn5@_yg;(Wp<`a+xj|{&T;c%UD5yyf@a>!N<@}rPBTAouB&ez)E zj%u~bDky8t+9udvMP0g%A1?q)5hQV2ZSB4mgd^v1nkgMrr_=${YoXmmBCEZIr1$5= zzo!t5=CI~Ah<@blYk9H@c)wMQc8sD#y*Nmxe!ms8prk+JN4_ah$(Yn{OnF`({2j*YJS=7kBpbn;*Kah zi23$JeZx0sik!jB=(kOMu0*PQG>x+qdG6AGvq-xsJsAS&wEzKj6QVyzf)vdhEy|GW z;U(#9GMrL>51rW~RFZmB?uU=NCko?TI4l7CIE^zud>v!mpYH2nQS+W;pXHg4msRqc4XkY4={@eL=dN<=RCXG6FBf)6A}gD z+Hnorf%k#@z)4O*bYH^Kco5j>eWmJI-5?fKMa;w?FD8Pio-j^n?|gC;;$oql_;xdC z9M4VbiqT8VghpiWnhJ9)0)V~l1Na>)yyj?QAJXCQpLgF@Smp1Yr5lwz9$ z6?)(~=tNOc)yyh=3Su2lXg6wb)N7T#hv54wc3~`oK6;&9=6@zI745Q`jO-F-1%UzR ztHsZ%mY?BD#A%-L6-${7qh6*!^2#*KWDFg!XJ! 
zyonJ=>-7RY163vB&+*r(Q@=Bh0 zYzYhgN`%%`(T2U(axj{<95W&G7eYLCLfN}Cde;%M>UgEfYaOk@J%WJsD3Wg|2$aNd z&(eOf-?xn(X;K1VT;teXX^!0$#U?QmpdC8S6k2YwZTLdLGDfh<8$t=;%JGDQkwqnq z#B4wMTwA$FJ4Acg67cNxi(C~K4-jk0ci0%1K-mc?oZXRF6eT$_Tn?78hrnrlJd`=RN)#vj@%Fw$(1@~wwrO=)De9Pyjwd_5ox8TP zuQdBm!kavV*TsKCYpEh^>opg`qj z-tIaIG9y7KKXMLMIRiWbCOP|e_- zxfF@(=n=rDiv#awWs9M7e#ePrb2L|KDO$P0-OiIuL$-9o1s!=+7NF6}KXp>N{FDEK zo2Hf?7fVz3oHWL~(C#0=QJG^COad8pC}sBg(H+_C;;H;y}e3zAeCLMtLiie__DP;^y9!$7ZRe#jUF+7kg)&)2$!!i(B24L%t>-4toy1A!SB-2~;1nw%V0jIoaS!r} zRlL9=3CUfGR2b4_AzB=M=XN|T^e%Uip|N7tb^*we?qhqE8JF?ol2*)l@Opvk=3r;s zf)S#LB*cW)3nZR^OEoVg?#I=^&K2Dh7y4+sAX6>`ucL%} zd$vAaF(%jktn0p3ey`!DtNlD4z`v_G?84ib)Ih|nw$#D8y3XDP#~b?BE-j}>HxUHi zImI$2l`4lOG(^`EIy(5=3Nf(Dj+)ynJ}$h#tJXlDhcN}T>U}qQY1})qLT*i}zDreZ z_Em8&g?njaunw^~CnM*IRJ$o12|aFyt{88v+*A&jt8{DJa7OQd zi8I2x<+Glj8$oF}uVZEtAHkNyLF}I|7w4GtHZy-Pzfe!l5IF1#SkE|jzYZx^rF1Xb zbf^kc+i*U_Emy>9U8!7hbEBY-AH!~HMDfs&|SkY%=ez%=iPO8pZx>A z@B8=@GQ%C8IOlxMxz2S(8I#m{AHCok6{8~nxNW>RvqgN4!t{-wB(~AnZUvsq&|!%3 z1ARb%j0}y78gUOzbCx28f1B`Br?CnJlh%Sv$`aQJ|7PFaeqg_ogmQ$uvZv>vOhVlU z9 z5u3CM=W7wel5AuGHlM+qrX1@@E<=ogoIkC(kzjXCtr;pfD$un`0qem~Ip#?Y91w7N zm)S0PHb{fzf1WO8I`AaZ{tF?$uA&BCXmOO-rz2-1Ro!dDoX7ev91UmeRJ_yYW1WH; z`CqR%8}oHG0Q4IbsUeIX=y)^8Hr?Wwh&En^Feiqn^m3PO?rr&w-itZN%TB10#5+Su zP5tQG41XV*>OkBeJcIXabn0g(&*}qSca5AZ&{D42)^jrpt}^3y9X(w&pe1TeVy^c5 z>+vLxzQZ4pCBibvq?@;(qv|dfbg348=IwVcMid8}NtHNaMDQQ&g_)tVuO(aA9a~Sc z+P9*hqd^VGL;K=c%DG7&GxdM_EGvdRQuM5y%ht$JT`GEnM3Cw2_6PRvO@+X<4k?|* ztQ>qXmgU;qd*LwPV8O(ow*79`E=yEfCg_De;SNseCnw&dE0Ws^Sg$k!@*g}24H^(g zk2VOxwcK+|+~P`BXI(9#{wQ@9IwPq#_@tIvv@s!VD`&b>ft>C$G6ojQe;mulHF~{6ozHWTSSC~PwU2#@ybIb#I{fJZlbFcFD%IvrV{M}5FO)_TM zdqu~QVJ20E(%OAaNY*6BUJE5?!3Q|4gJLWhFyu$ak-BmVReGKQ} z=TrWsO^?P5L5gZ+g#{2TOrLg_Q16aCB1|>8ndlV10w0E-vut8;v^cFCK2Cn%sgu=T zan}J9p0#@#M?xxUi5rI2V%(Bj=Ur?Sfh#|qI$Fa4W@{MuYDp6ewtewQT3hzE**DW6 z^8O|PMY{Lj{Gm7!&xDLFm&s;VXErgMD7@oIv*a^Zq>7h54KKz*$GjT<4Q|U9xC63RlXMBbTG&aPipKo755)g zaAN7c9DHdw?D*X0V_x8=sTW+&{1gOKqK$|{y8OLWbd5=4HT5?l^Xk!(L<^HO{5dlo 
zkXNqzzhrnx*Y-lFAe+{zjgxXqZ8h&RPMuAP%0&-5*-FUcs^xmA9pNZNrp;C=Q3~wX zHU&Yhw7qJ=XSsQM55&jsiKtofypoCb3sYBzE{8wla_nmk7&ho};xacJJy6F?LhaV( z)t0K4%3={gqu7TnFQ`BIS zn&oc=5V;uaV4(;1X72OW*`PLU6f$9Z>^Z8)<0KcG=b~3@l9Ec9ZQ^=RWLsrvsYfQi zp0@nNuVG8dbQ|450H6ojddg@bc5g#?NRDQ5oa5;oF1)?1CqP1uDw-s&QCn9tF9L9R z(GrTzTvwlhR;sIx_LGAG2=UKEWz;@~+biJ{#E^yny3?y%MrbMc1s8emP+LWUNAqn> z-vY@ea6U;6rJ7PaNyQcpG{@GHSG}#h)9*)+A)b+(vyMH+Q?44@l{d7f((f!kx}y#- zZ%Kq;uzcNGXK-$+D4uOY+iyrDqQTCcT<4~7ZrC?gCXBiRS>t@q>x@03HHPIa1m1V< z8#&$8z%0wf{vjvOXepx4i^UcptHmT7D<(mzFBp9Qo>Q|beP|CT>4$N_?yOIOHh~BVhMfyKNCy{>!Mq${ zjoN8W_+ElqX?AKnG}1DewzI*@6uV&tI@mdxd-~p&zp^rJEJyYIyD+mIM)Fg=(G~N_oykX{KT&vVo#Wa?gHCNSNNsVd&9})IU|+A)VxDIm-#51FXlvFtk=tkzLTSEoJR7Rq2GBaelj_uMar z|DX;+xwf<`C=E;DE`lUmv7LM7)=*Ywe8b5h@f&V1-A^ido-aN#D|iZ0;|41}$;7P; z<-jU11qH?`V1&qhr;>cchqelD!8$OGhh7741buE z|Io=L<^l)^7wqc!$lk4hLs=742%ex|YS6c0Q`oGpe8>eDt(tp;?lIjZLODvfjkXf# z8W(U8f%YcM?_q89vB7N?*S74=Ps}Qf?ak4*VEOuDD`Ka^Qh-D)3lTrfkTlh&9Zi6@ zSdeX>Wi;%3Oeba66c5nO5(cDw=m4V&Gb#+}ZXZ$}Y<;-2 zM|2y8v37qdGO=ZZZ^tUl3syVkk)=`2`uRN@;A4DV`3H;CU%9>fUFetMKf18#p8SjC zwm%;=Ffmy7Mp%9&QTv65Dl-j}vC|qd%4_>C#8u|EQSWiiX5xZB{Pos!*kT0N#yui zKK8?=#~%O)+N=OBI=I$zFKq%Uioel_0^i%F=1`K%#UAqb-8;wL0_g|ouE}wzvn12~ z!=Pf}tAtEbOEe)bLo91BkjN=g7N$cnV6VdSPeMUb`X!g+AUskeIiEiz>YVg~(JA5> zP3$V~^vYK*EiLCI11nZ)77w^8$NzSJgEii)vKUj-k;m4_5D+OcVv%YjyQqYHvqGg? 
zok+2*-OX}L(6?VtbD2$%!lzd$k{L5Z)APxH-cP|{VKK8M$2~A?7+T0Fs4Q}``PTNv zb=YZ)wS0~1XH)Z4U%#*SHhawIv8NAGq51Q=W~wnh?=6pz%a%W8B(V%lNJv}m>Njdd ze^XpHAnC55_jn*GnS%U)0p^wH*=;f|WDGBGV1EBq`!avHKZ=YC-y zZ{ig%=X;~Rm5fI*mJrj@2vgdUC39kHcs8+^VjIsUR_e4;E*c%-jVP9ulSNcGPOr=3 z%<0!J&m5!VqIjQMSY+H5{?43`Hm+@z3=rb2Qk8J6l`4x$9{a$8LCsB@h-yCb-0_iOO$3{$n=j~lMx4uZ)5(sb^iHVh)G|X# zIH^1Mvx!#iO?*RP|9WZ5)hFE_i;opI6}X2S5FJAb{P5HK9MeA;9Q`q6BoqfQgJwd2 zRdUYYhgRtf5J44;WXtLU9Dp=0k*ru5_c+6hMcCUqC&LiCY6Iad7Yk2y-ITogNP}2r zIlT%8^YAVq;?6qm+7@TkLM}7TsU{OcQxHU>TK#TTg>5|xB)ujU&v3!j)nfJQYAAbv z**C!4P(Q#32j&ULZY#+2px zbtaFlE1|><1y`;K%n`NCjw;8P8&HVcQ?|JGcP{2$)k8`mXAkytyh2Ot3!c|RL)9J>A~>N;P(Ho7=#g^&_jAqqcQ$zOg4$VYUY zSqDHidGnIYlAO0KafS9z!Hone`tp3Ch8! z=0b6w+zeZ$uI}V8wsrdSF*WF)Gf!`BPT#5@q0gb|kEp>!c~zYg)cKt}RFWdZdjLX}u-?>UNuQJ$5Fkd}Zs0`$H|QABr=UO0DmqD@>%Fd7W+Cxz{7v^`(V;wKeKZ{efg! zD&%k?C1+OFRpb=J)-QXbGviwt$vwDpLwPuuIQpog-U=p$9)GFS%Ad@wv?p-Z$p)Hz za8L{LAKc1i@45>s^^H&W%q4u1qRZBOb93Xx^n$~aWgChJ^u4B_+3-V&If~1x-#0!Q z7QbFZcsMt%^cCK~<6HwOQ*0Y;V64Gs+tR<1sd~#v(3obd@Y*5QFMeW@C&&Rdgwe)#Ydeli)@~;=Hbi2Z79J`%`h8u z5KH5g+*Z75j>4;!lk>=~1ma1?L(hveT^gQC*vyl8eZDtq@(KadBMpC8!_N})XdW8s z+t?COzlFTRhG^~co44;im;ySjr&lUqN5uH^b=SStxwv5)O~Rh^n>RO z6IPYBQ;W9;D`=g43JS1v+EDNww>u8G ztqtjt1+-N67{Vx>E|aYgjM;TAkiF~--~FVv0EmunwxT#7;tn{gjnVrxpRx%eR-?qP7SXA!o8W9? 
z4w&9l*Qh^VHlJ4qKGIu5FD#9Vq7g6voo-tdHyqXwfrZUW6gSWSYRuhxlv`^>mChu3 z-gR$dho&(kIV<~Jc5S;!n5dyI`3 zkeHk6qXvL(3~xwvVnBAJ{M4&UHW?j}`>Bl(hqdkwYa0wIpVuBs7rK&JL4wA08$r5K z=5voLX6`M5-Q2FD#ZPR5NX!N>I1%s_ao@{;18HrB=2~U19WdXNQ`mbYfwSG09Y8{& zqyB7zth8~r9V_Vl`Fu{0^=bCco3kMzLqe)X#Em?{gHxm#K9SNQaGiO1?{+)*jE`6| z|1k~K+`GbvgZkC@&A?&oY2TrfA+h(TX}%I6gnL$fTBkn=I{?n;qwS}qI(CB=A6?EK zCS0HUg9MMbKYGD=wRS6Ek%@TYO1#ecx=)7EHysBrA9nG$MG7AHEU`n$N_-u6^8CDW zsES5nS%`2xrc{4epi_gX=%+)iY7VH1#1_sE4H|YG(ugu9>2@HQQ@!}n>|Dbm{4Ni) zf6VsV`l^dyYA58$Eco;*X84{@`}NYp;NC6kX9xRrJ3&^Zsox-|XG3BL>t$QkdJL_C zi)&n&438B~5QX!Ls4h1|Y1wE&{u8|A4J(AZbIjSK*Uj{Pk($NDj5BO;9yU8R)Wq|i zcQtTywxd*8=sc0uC%rza@91Enfl9})*F?<|{G09gJ&JV3wG(!y7(RXc+nLxIe`(|B zfOX+$s-wyCfR?pqdvdq&CSxDw{zxZ}r~FBA1^}dK_S_}B_5R2D5B@oq15unCleOXt za?jB%M=tY0k@Xre-TBmR2$La-eaT>ky_9c_Q>q8?9$WJXjnO7VHO8`K_YeBoE8{gfYIk8`0=d9f0EOG&)`SDVt z{=L)IdHLhb`SWeD3O-9)bS$vF5-9nR-nUu9f!Wojcl=iolL@tWAyj5ok@a8YhoSIS z-Hz&q%VWx7g4@Gz*BR5w_xuR^&)jOgt_wDf*6l6xE>p}H{6>D?GaS5P4YE|++tw%H zSE*tAGsa;{)aw4SIv#EK_kC$UHi9D?Zj2i;?waxIx&EZ6iTw0#!kC!%IGM3c1l}lr z;JKME6)D2@v26Yk8O@VoCZ1o<4{Mltp-E<05x@D2QyP>KG;OQ6$oDQRd)4|E%N{1Q ztCWXC2$DNi^GfG4EtRsqB}LyCST7cujJo2w1z~Heqh9x&Y|5Sa9`CR0t3pX3ZtaP< z=YfSWst-iAsvNTqO{UjdTF%ae%|`9ordzN-B;Ljm^|)xlc@QZ(e*`<5c#Q(%Ru;9N zGJR_=WZ6v=g+Xq61BRar@QaV6IbJd4KZdYW4hErdyFm}gY|-|*yfE4}E-r4)S55{y zuLFKg2qBJlRNOkf2Bo!>#HQCK^_-_|Z5p8KzK9(%$DsfdMd&?${tNbk1R@=mdDj+R zqPBGRZA^JKQRAFfnv3>3ic%~@x}0CIy1)^f1T$}TiV%eaQIbDASIT9JYHrGS>KPEn zj8~Pli!LBtGQ4l51^g&JS1%`-;ziAFJAL1*){>{XAjkIXaL#E0Ef@!$AC=hw8K2!+&+h;k+aYPBRvjoJi$+Ix=@MK-BTPmw&VTnRDk^4cmOSEWF7?>l>wv8OiP9 zr5RP%ATTV9wHYlrQJ~gjyr+7Cgp?NO{W-nt>u29Pj8SxxPpp}TgGz^Z`AylnlHYYF zozIyFKbgdt^JPcBqj6l)^h>~i2c*ElR7QVa4?O;@>ys`v&8ionF?q&%Zb~2~Gbfu0 zcTGQH!6bfX*O1)VlNYJ{u9cVdb3tP1q4m<|lCB-I@dWU;a=xCsj<}XqdF0K9Q7!AGM6!bpAaCDvqruWu zqG?0ihu$9~_%C29{p=1dt1K!VjW^p*+fSlF8Td~UMZSEQzYxS-#+xyV61)CR-5I{% z0oHPwcDk)i>E1t2Ry3bOUJE|NGP+Df?~}|rDvx>)+kY-T4_|_xZ>n{Zdv}y6HterlNSJnCf?JmLdSH-@^Kgl>(C}E}n%L4uGW(PK*%M{E7MEXf_&d1D-&dPk5 
z=^S?zG*?oG@DiML&3#THD4xPIPvTSR7mDj#ESjP9`}YgZf;*uhM#}e^=`*~jznq3E zh+}bBKf&SCT69DIPS;uU%4YZL!?~+jbffh-gDE>~35nPNrt+#HjZSi@Lc>dSf#58S zt1fnBEc}5bWTYXwQxyZvvV*Z^Yrb_%n|e`*w*K@Fe@pEz2c!-(A7}qQp*%^g1y-?o zZp9}nC-SnCbF9}>jtSV~)!2ds+@7PH9@K5*6_=>i;q!(&dx!&J`HhFn`o4SOwKUZk-d9`V zh@%>}00K5D-zrbgjV$>|63l)Zu-V6^6}|Mc2br`BK&+b6QsXuFE;NSxBz`2wQ#IXd zrTp-6kzY^tG(aF!Hj?mpHOExc*0h6;h(70aGY3mUyPPbg>+x6g-G{f$`0b|(Mfdq@#giQYXTc_n*~G%pDxi&()y}ty*Uau29{P3BKGvoO-}}bP|y2v z8Ica(QUA*MxWWrSVSXs#3DjeP$F+~^{h42)mI4kRcSHUYkO!mXrdCv)?B{$XbD_LB ziBzR+cr3J2HOjxi{Ju$wZ?p+#bI#3eqT&dW<~vch)c!C4+9-Q)=)dT?TjoeIVkw;g z_Viy`4L@Oo^N@NX}+_Sl#SvT zQe%+w7ZLHak7ahfG?$vZ_IcnaA~h7WwgM#`@}v^6t{K~>Ep6gjqPz6Q7Acx&>TqVT z>?Sdru}=`4x(q)}3V4oJk2g+H<4n;Wa_Nvp1+U%z8~{plDb{QJ^qipjoMD zSfB7)mP!UQ)5LvHI_-pg*#7uPS2E4l1w{YWDN~CGTL4f%t2F|f1KjJQR3M< zLca<$=C7uh&M@1+4u2L^f(OezTo=TyHaoB%Sn51?Y3B?+IcR@cf>^HK39i-mUe7Tw zFxn032$FR-R3PcoA5%yH-)^svYtP&)&wMv9YkTLFO8cDgT8GlD%g@}NgBaRs#1*gouATkyv?czo;}Fk(sHmF;02 zAsOkY*W@AV8USITc_0giZmtRk!}H{qdhn{AdbOO8Im$B32SxR#a;kXT)a}=hQ;_-c zOm!t}+K9Cs%aX%6MR#XLTQ z)AVDaH@=mvKX!6S)h2*kSE%Yu_bK4{_VonQGpuOab1huwcDp!s^_rM{$p_3q!QSYn zJw*D3FDYB-u@pg5S zonBMwcQjw;ccR?aiemga?m0I?@jYX<*jwb>2UcRR-%@JSQ<0W*E8|+eI}3*Q6;UrG z|2<%mLA3dWHpc~o8R}0x%7)Y|kEYR5m0TDSIv`*CO%{VN|2K%d&dv^#}X=d62})Q*dDYz@*-*zRl;3 zJ==L?^|AaT+Uc%0HWx?Y*liC?3GGw|tXJ6CT?Wu^`uY({tW=(tU$e1AsjKr1o8j?f zBRbt}eT5~e!F_8>g7rL+f>Zuu7KNQFzq%?JAir+?G2I853ab+GuO)?Sc4*2!ecYah z7q9%*WWAv1tqrT}BlW|{!e)jVoI6KY%vm*yY8>OHLPoZ`Cqtq!n0Vezwd-}+kVoHu zJlh$1VR2wTUV@(8#cR5X@KWEF%ORl>#$L2`qa2FOakf)*dq1^h?eETubZH)t?l{Id zmh@7dkC1eC%YLfTx?Yb$a_C8u7+XV)p!0kRw6{g%Q6$S6Dy_ggChF5yy^y$yV3T+B=k9MmMZO4o3H@yW3 z8`Wp6?9NbwWuHQ4o`S-zlksxw!Z%FoXsPXLrdD!bH{(!Puy85W{L2pOl_;zIMI~&M z7gBngZ^J%<#+0EZEYC|efuNjbL|dkxS3QU7YL)7I%Ka@lZ7gJXYV)a)gr$Byt*(i+ zd?)<;EjEm?rjRNHkvYGo{LAv=1j0J9)$g{LRmA=YKuQ*nb(#aD4H9;tNN!1VDzC%$ zHS+xK;Fe2%nO|4P6kEw5_ACNi}NX?DAjD42oi1=-INnVA8-x^1lT5{;JQG+ z8j>M$F!T|De`^DPkOlhE#re-Kfb@FMiYJf>y#d5z?(`!y+m;_~VW!q5+`PKFKG_#o 
z^s01PTF~|Eh1$DN!fLr&^fTr-w}VC_CL>_MYE!m0PtWEExCfyJ`%GV-$Jcj?tc;Gn zt}d`2{IHdEB2)_!a30W`0Z~yv9;~Zi-+M4LYqMbAMVI3y1uzW84GL5{&2HL9!rDKI zm~`bdH%*qzieCmV(x9!?imq|7x11gTCw$iD{N6*nqdmauT|n9ZLdFT{Sm%4JX-HVHMbr4l`+d*V(v_yWEZa{D5LtD><#*_vf+%CvxTe(VmSc9shsWlN`%THVSoe=?Bp zhTm8(=+!h6q>*EBzpqTZnp-A^UFC|=O?q^Hy zzB-MG`W}@!D)BJKyXo}6H_fN-NRsLiIw-?|K7l=lO`ZEwJdXG&b(Y->s;~MPB(SAv z^M~+$&?@p+{XWR{aE@C!W)KI+rgahB*lz_w2Do=L`1O2Wk;eX7I}pOCN;OoXa32ms zdvXh9LLT5eWtWX4Px|U_etcdozKgto^8|p6^^^Vf@hm=?$P!{35O7^{hqT^7yIy+QNa`j?l`~U?Q@LD|plDe8+geXl^8 z-LptOeXr>z(dJqVVK>PU2XGwFF+qMFMlc1ClK^^3Ohe6^a^Fg(QjZGtkJ+31h%NiH zx^3OOfQ35D^i#`rESp1F6qZHpM)z7nXQ*X&=JQPl&WzbeS92wKSH`uPCPK>ee4$}% zh{Nc(S!s3dWU%IB=v?$oLQxLyV}dJG?uchjd*JOVu!DeyN0xD*_d$`jIVFRMzKy z>(fXKYJ!33mk?ZT_IzYCj>P{qdUqcH4Wm#%42q|Sc(rhVwZtCc;FS3;3pbI^mByK# zgnNKPs@wFLn8Utatbt99`j)Hjek#+5*C*7zikHdn58Z@8)2DCN6ZXkVn+3hM^DEnS zYvnD*ZG}3T9Tw)jq-xLz9HS`L-1O*E8e#UwlpSuRR_?Fgt-rz%JQi4EcURt(Y=)-? zb4>;T*#()J1-QZRbNQyCnzph^wpznsr&TAN@01NKuH8wdgVx20-X;U?#Z@E@9+rzb zcFVFEP*)KMXHePHiu`9kTh?RWw%iG?l$wjK;APN2uK%j12ftfOVf5QYQ*DLd0+SG65BK6|0M1h(= zp!;*s2`bH8253NOCdcRuvrDl*=9@9mI?=)^6@~Y$rqfyw$4##C2ThaCM1sj?*1}*b zxgVtEMEVB4=Aj+L-;)b?`ipYvHrQxNlQ_irOj@`%_M*h0mw-P>accDg>IZA|8O*Ac zaj?Denlb8JNj2?tJ_xfv&+#YtOeq}laO2@^bS3{H?zoKHwu$7@1R$Z!1fn|XlSd@G zPfYtbpPQUn80KQ;{k#Us)H?3H?iUNrWU3iHZLbtZI@j{Ryjprep1r|0ZZX_LEsb$4 zx9sIMGAMWPJ+R5AUO|c8xV7{8E4)-~yMZlEmVXO!q_&m`3K4x3<|yC7xAhIcv!iEq zgkPNE#VwQskOGxPAmvXb0Bzmid$hLnWVKRRJZE|o_gEiwG}inVom@tx%$kFZE~(KR zK+h@~BQwYyXc+$%U3umS$V^D1Eig99!7ag zOq)XV?JpEvdnaa`T;Jy$T7f5l>hl!M-f^ zPG`7IqI5I$XgGWYvM9qVXv4W{j}sDmoF{c%0sSCMG9`u#uVZ9g3P4;(*mBMR z1TGkioItE3oS^4naby*ix9np|-woT`b zy>0XR4lytwmln>e)pot;+WeLSE$x@$*^8y8N)j|1T8*fQa2IO7N6H0hz>9uX?QSf0 zPo;_ySS9Rt3+db($rcAF`Zuk=20oerNRYXOB#Wj1rOTeAFWmR>5^wfhTf&-okRZBE zTGSpk*9Ft5F=Y{%%h`j9$;mn^)ApN9R-6ShNVJx|OqIN^ZD5>Mp+4cy$oiK$0)FB!kg8HLU+HE5pri)E?#$K3po@(Z z+Z%vpl;$2I`U7IS)d+qupbqB?NN3e5U+-29$t3r#kf-o)dhV9XGCznYy^Bp=yHnE8 
zE&D)O3~GFpr;S9<=;KPX@*A`or!u>3a(*wWxSH|ClP-NXDAI536h6Dx2uVjLUkGW9 zCcqEf2?bGMaoPZ-iMbJtt*K!>^_<&(t^*WIuREA-^cz}b!(fL7JOhY;Tr7^*rNO|| zA`sLfRUJjx%34v8vXWoq*#wc=7@{|E)Z__JKaE&uNZZ6828|62#8ZOU_c&@ zDtG3Z(4~_^{bttkQd-w;qo=zST)4kJnSucj&ogk?WyeUIV3rLm_#)uJ*c8b)q!#oo218vdK<5>!tno(Ly8vRs zKyXEA+4D}py{fThpx>uWQik$PZ^HR+lYxsdz%w>*9TW6t1{lbosPLoI#7ssgMd6#% zppv`y28#SN3A5&2^w=pzsx>+txDRvygi6F<6Ra)gM*u9zsdT^pZWdq~{sA?@&kOKN zPIQ(S3qyMo-3ruWz{2~rtLXqc_tkb`VXC$}pgpC;4dy}eCDG%kD+!u()1O>IAsMnM zfS(qe^Pzmt^9}7T7HO=QkMsby;+QJm_oK)86}Hu}$W+fC$V>orVv6SmKbyp~1S*&B zHT_s0Mm`&eXT~O=^$)^O0EB6+BH;ia)-%<$mqKg~LQr}UDyi?#OSc2Gfj!_J_*?*t zmh^@DZRU&!9lK~86|!H4`~Lt+RRaBHdkb5S;?SUCP zs^hVhQ}_NFPd}}<-b+te2W6Dt<(>E=SV4gCPJk~VMG-CCfx#99;7V;9 zK$kJ8po1|7plsTO)F8-`?@R!;hK!d2*4Dase$y}U{-2XJjDZrE7%@CnibgMnVjSJi z)?l3+xmUAzlYzK4v+Ph@K}zANr<6rs&jNeyaTv>&8CyV@u;*!AhZzv`8}WNwF(*&H zu__kaNTe0Z4bswc4;0-(uw8fp^0A||0_ytz1^out2xn9qhSEV$4gMxNTD*?KOPA>E z0Fu$Z{euX;8FOfPi~ntO2X!a_`O7d32@@xIl3wtz^>#20ReH1NW@dm}r|Sa_ zZ~5myZ?#p}{FXOwM`TdpUx0+IHHXLPCeU*JN=~#LgcLadtn;8S zF_&=<0G;U&vv&!DEDrL`TBpX5=+l7a0DOt1=o0<`fHmnsFZsQi^nZ5Ie|GI=oJ4TF zY7^I}KiU-$O3AwW37CJ;Z53<(gl3&QOb|efImReQ7j|sn>iNRY#t#6iDh7x+ml33r zdHw)I%7Fbx9oe{FzjGIda;~LJA#q&nxQF&R4^n$8aq|sxJW|>>XVl2>FJ=O;>OV2c zvKac8*%r_H8=eiEKUH^p;@|xEa8USw+^!XcycznFaP(nY!S6GLL}dQ5K>+t+ZcCF( z1Ur>Z<5a+`VnDIM6#~wbv)r~Da)13Q@~`|K(Jv(>vg!!X0}9r^UgytCGR6@azPbOm zXZ&NR{Nw3?C6f~d>ipHs|K*|XaTeeHmur8PkS743o}*nxEB|%_U;|G4xgmcwsK9T5 zWeOzewpKXp|Lq2T&lGn4e3!qy;qWNn<9|3ECH||_vjk19^Iu-(*ZY`T08Z|0y_*7{ zF#fj(`_JEG;*JUZ_fLS|R`CGu#;Pkquk>$2uM%QG@wZ3*^J;IBfvk5t8zljtSpLU! 
z{N+c+&MNHx{t56~Rz;x9BTp0+`mZA2z_g?P%X?A(Z`%Ji?Z0k?KUeAh4ZB@p3`2n{ zG|h6%fb{b}wl+1sT@!_>Ir5E%8U2}Z6F*Q32?>#6y+Hhz0eJ!#7Ukj5H#pdx^z$$U5y znW_Yov#)``jnITINPY32kH&^dk(QSB<+n%n-@r#kvid-7oxSC{qOOfJ`5&XSRQM8k z(5svSR|De&L91Ob17Q3IEeNrzL^*C4c@5s*EtAAsq$`-{vfB6;qce;EPCKRub}7d{ zjo@Ft>i+}ji;BZ02mZwuWg?KbK=coj|NZ9w8QRQZq(9kN;dT7S2>fnYHxa;uo+G$; zeueA)`dxD?z$eVsyMh1BCtQgGRv+H-0PO#dG5dYCW{`IM2b&)Ezu0yE8)d+9|8E%n fUm1p0#O*IDto*y|U2eC4e=nt#o|il``0)P$?f~yH From 59cf865d847f6d0181188bc11d4d9aa8b1ec0faa Mon Sep 17 00:00:00 2001 From: Xiaodong Date: Sun, 27 Jan 2019 14:14:19 +0530 Subject: [PATCH 0018/1104] [AIRFLOW-3761] Decommission User & Chart models & Update doc accordingly (#4577) In master branch, we have already decommissioned the Flask-Admin UI. In model definitions, User and Chart are only applicable for the "old" UI based on Flask-Admin. Hence we should decommission these two models as well. Related doc are updated in this commit as well. --- airflow/config_templates/default_airflow.cfg | 3 - .../cf5dc11e79ad_drop_user_and_chart.py | 87 +++++++++++++++++++ airflow/models/__init__.py | 47 +--------- airflow/utils/db.py | 20 ----- docs/security.rst | 36 +------- tests/utils/test_db.py | 6 -- 6 files changed, 90 insertions(+), 109 deletions(-) create mode 100644 airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg index e6bf31355f222..2069f933b3e71 100644 --- a/airflow/config_templates/default_airflow.cfg +++ b/airflow/config_templates/default_airflow.cfg @@ -271,9 +271,6 @@ access_logfile = - error_logfile = - # Expose the configuration file in the web server -# This is only applicable for the flask-admin based web UI (non FAB-based). -# In the FAB-based web UI with RBAC feature, -# access to configuration is controlled by role permissions. 
expose_config = False # Set to true to turn on authentication: diff --git a/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py new file mode 100644 index 0000000000000..26325c10855af --- /dev/null +++ b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py @@ -0,0 +1,87 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""drop_user_and_chart + +Revision ID: cf5dc11e79ad +Revises: 41f5f12752f8 +Create Date: 2019-01-24 15:30:35.834740 + +""" +from alembic import op +from sqlalchemy.dialects import mysql +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = 'cf5dc11e79ad' +down_revision = '41f5f12752f8' +branch_labels = None +depends_on = None + + +def upgrade(): + op.drop_table("chart") + op.drop_table("users") + + +def downgrade(): + conn = op.get_bind() + + op.create_table( + 'users', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(length=250), nullable=True), + sa.Column('email', sa.String(length=500), nullable=True), + sa.Column('password', sa.String(255)), + sa.Column('superuser', sa.Boolean(), default=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('username') + ) + + op.create_table( + 'chart', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('label', sa.String(length=200), nullable=True), + sa.Column('conn_id', sa.String(length=250), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('chart_type', sa.String(length=100), nullable=True), + sa.Column('sql_layout', sa.String(length=50), nullable=True), + sa.Column('sql', sa.Text(), nullable=True), + sa.Column('y_log_scale', sa.Boolean(), nullable=True), + sa.Column('show_datatable', sa.Boolean(), nullable=True), + sa.Column('show_sql', sa.Boolean(), nullable=True), + sa.Column('height', sa.Integer(), nullable=True), + sa.Column('default_params', sa.String(length=5000), nullable=True), + sa.Column('x_is_date', sa.Boolean(), nullable=True), + sa.Column('iteration_no', sa.Integer(), nullable=True), + sa.Column('last_modified', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + + if conn.dialect.name == 'mysql': + conn.execute("SET time_zone = '+00:00'") + op.alter_column(table_name='chart', column_name='last_modified', type_=mysql.TIMESTAMP(fsp=6)) + else: + if conn.dialect.name in ('sqlite', 'mssql'): + return + + if conn.dialect.name == 'postgresql': + conn.execute("set timezone=UTC") + + op.alter_column(table_name='chart', column_name='last_modified', type_=sa.TIMESTAMP(timezone=True)) 
diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 35be84ce200d7..e2deac82a59bb 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -63,12 +63,12 @@ from urllib.parse import quote from sqlalchemy import ( - Boolean, Column, DateTime, Float, ForeignKey, ForeignKeyConstraint, Index, + Boolean, Column, DateTime, Float, ForeignKeyConstraint, Index, Integer, LargeBinary, PickleType, String, Text, UniqueConstraint, and_, asc, func, or_, true as sqltrue ) from sqlalchemy.ext.declarative import declared_attr -from sqlalchemy.orm import reconstructor, relationship, synonym +from sqlalchemy.orm import reconstructor, synonym from croniter import ( croniter, CroniterBadCronError, CroniterBadDateError, CroniterNotAlphaError @@ -606,24 +606,6 @@ def dagbag_report(self): ) -class User(Base): - __tablename__ = "users" - - id = Column(Integer, primary_key=True) - username = Column(String(ID_LEN), unique=True) - email = Column(String(500)) - superuser = Column(Boolean(), default=False) - - def __repr__(self): - return self.username - - def get_id(self): - return str(self.id) - - def is_superuser(self): - return self.superuser - - class TaskInstance(Base, LoggingMixin): """ Task instances store the state of a task instance. 
This table is the @@ -4355,31 +4337,6 @@ def _test_cycle_helper(self, visit_map, task_id): visit_map[task_id] = DagBag.CYCLE_DONE -class Chart(Base): - __tablename__ = "chart" - - id = Column(Integer, primary_key=True) - label = Column(String(200)) - conn_id = Column(String(ID_LEN), nullable=False) - user_id = Column(Integer(), ForeignKey('users.id'), nullable=True) - chart_type = Column(String(100), default="line") - sql_layout = Column(String(50), default="series") - sql = Column(Text, default="SELECT series, x, y FROM table") - y_log_scale = Column(Boolean) - show_datatable = Column(Boolean) - show_sql = Column(Boolean, default=True) - height = Column(Integer, default=600) - default_params = Column(String(5000), default="{}") - owner = relationship( - "User", cascade=False, cascade_backrefs=False, backref='charts') - x_is_date = Column(Boolean, default=True) - iteration_no = Column(Integer, default=0) - last_modified = Column(UtcDateTime, default=timezone.utcnow) - - def __repr__(self): - return self.label - - class Variable(Base, LoggingMixin): __tablename__ = "variable" diff --git a/airflow/utils/db.py b/airflow/utils/db.py index c055cdc45b4ff..76b236e7fd318 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -84,8 +84,6 @@ def merge_conn(conn, session=None): def initdb(): - session = settings.Session() - from airflow import models from airflow.models.connection import Connection upgradedb() @@ -296,24 +294,6 @@ def initdb(): # Deactivate the unknown ones models.DAG.deactivate_unknown_dags(dagbag.dags.keys()) - Chart = models.Chart - chart_label = "Airflow task instance by type" - chart = session.query(Chart).filter(Chart.label == chart_label).first() - if not chart: - chart = Chart( - label=chart_label, - conn_id='airflow_db', - chart_type='bar', - x_is_date=False, - sql=( - "SELECT state, COUNT(1) as number " - "FROM task_instance " - "WHERE dag_id LIKE 'example%' " - "GROUP BY state"), - ) - session.add(chart) - session.commit() - from 
flask_appbuilder.models.sqla import Base Base.metadata.create_all(settings.engine) diff --git a/docs/security.rst b/docs/security.rst index 3e63866c08543..291085c4aae8e 100644 --- a/docs/security.rst +++ b/docs/security.rst @@ -61,44 +61,10 @@ Web Authentication Password '''''''' -.. note:: - - This is for flask-admin based web UI only. If you are using FAB-based web UI with RBAC feature, - please use command line interface ``airflow users --create`` to create accounts, or do that in the FAB-based UI itself. - One of the simplest mechanisms for authentication is requiring users to specify a password before logging in. -Password authentication requires the used of the ``password`` subpackage in your requirements file. Password hashing -uses ``bcrypt`` before storing passwords. - -.. code-block:: bash - [webserver] - authenticate = True - auth_backend = airflow.contrib.auth.backends.password_auth - -When password auth is enabled, an initial user credential will need to be created before anyone can login. An initial -user was not created in the migrations for this authentication backend to prevent default Airflow installations from -attack. Creating a new user has to be done via a Python REPL on the same machine Airflow is installed. - -.. code-block:: bash +Please use command line interface ``airflow users --create`` to create accounts, or do that in the UI. - # navigate to the airflow installation directory - $ cd ~/airflow - $ python - Python 2.7.9 (default, Feb 10 2015, 03:28:08) - Type "help", "copyright", "credits" or "license" for more information. 
- >>> import airflow - >>> from airflow import models, settings - >>> from airflow.contrib.auth.backends.password_auth import PasswordUser - >>> user = PasswordUser(models.User()) - >>> user.username = 'new_user_name' - >>> user.email = 'new_user_email@example.com' - >>> user.password = 'set_the_password' - >>> session = settings.Session() - >>> session.add(user) - >>> session.commit() - >>> session.close() - >>> exit() LDAP '''' diff --git a/tests/utils/test_db.py b/tests/utils/test_db.py index 718e2ae1066fe..6ebc4b2086b96 100644 --- a/tests/utils/test_db.py +++ b/tests/utils/test_db.py @@ -40,12 +40,6 @@ def test_database_schema_and_sqlalchemy_model_are_in_sync(self): # known diffs to ignore ignores = [ - # users.password is not part of User model, - # otherwise it would show up in (old) UI - lambda t: (t[0] == 'remove_column' and - t[2] == 'users' and - t[3].name == 'password'), - # ignore tables created by celery lambda t: (t[0] == 'remove_table' and t[1].name == 'celery_taskmeta'), From 1ab659f4cb4f2af49205fca57758a72f6341a125 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Sun, 27 Jan 2019 12:32:10 +0100 Subject: [PATCH 0019/1104] [AIRFLOW-XXX] Remove almost all warnings from building docs (#4588) --- airflow/contrib/operators/qubole_operator.py | 10 +++++++--- airflow/models/__init__.py | 2 +- docs/code.rst | 4 ++-- docs/conf.py | 2 +- docs/howto/manage-connections.rst | 21 ++++++++++---------- docs/howto/operator.rst | 21 ++++++++++---------- docs/integration.rst | 11 +++++++--- docs/plugins.rst | 2 +- 8 files changed, 42 insertions(+), 31 deletions(-) diff --git a/airflow/contrib/operators/qubole_operator.py b/airflow/contrib/operators/qubole_operator.py index e6ec21ba94b6e..0a591fc7bd274 100755 --- a/airflow/contrib/operators/qubole_operator.py +++ b/airflow/contrib/operators/qubole_operator.py @@ -117,15 +117,19 @@ class QuboleOperator(BaseOperator): :additional_options: Additional Sqoop options which are needed enclose options in 
double or single quotes - .. note:: Following fields are template-supported : ``query``, ``script_location``, + .. note: + + Following fields are template-supported : ``query``, ``script_location``, ``sub_command``, ``script``, ``files``, ``archives``, ``program``, ``cmdline``, ``sql``, ``where_clause``, ``extract_query``, ``boundary_query``, ``macros``, ``tags``, ``name``, ``parameters``, ``dbtap_id``, ``hive_table``, ``db_table``, ``split_column``, ``note_id``, ``db_update_keys``, ``export_dir``, ``partition_spec``, ``qubole_conn_id``, ``arguments``, ``user_program_arguments``. - You can also use ``.txt`` files for template driven use cases. + You can also use ``.txt`` files for template driven use cases. + + .. note: - .. note:: In QuboleOperator there is a default handler for task failures and retries, + In QuboleOperator there is a default handler for task failures and retries, which generally kills the command running at QDS for the corresponding task instance. You can override this behavior by providing your own failure and retry handler in task definition. diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index e2deac82a59bb..60f8cdcedba86 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -4496,7 +4496,7 @@ def set( """ Store an XCom value. TODO: "pickling" has been deprecated and JSON is preferred. - "pickling" will be removed in Airflow 2.0. + "pickling" will be removed in Airflow 2.0. 
:return: None """ diff --git a/docs/code.rst b/docs/code.rst index 6b07c3a9218ba..e2754d3fa4693 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -322,11 +322,11 @@ Variable Description ``{{ prev_ds }}`` the previous execution date as ``YYYY-MM-DD`` if ``{{ ds }}`` is ``2018-01-08`` and ``schedule_interval`` is ``@weekly``, ``{{ prev_ds }}`` will be ``2016-01-01`` -``{{ prev_ds_nodash }}`` the previous execution date as ``YYYYMMDD`` if exists, else ``None` +``{{ prev_ds_nodash }}`` the previous execution date as ``YYYYMMDD`` if exists, else ``None`` ``{{ next_ds }}`` the next execution date as ``YYYY-MM-DD`` if ``{{ ds }}`` is ``2018-01-01`` and ``schedule_interval`` is ``@weekly``, ``{{ next_ds }}`` will be ``2018-01-08`` -``{{ next_ds_nodash }}`` the next execution date as ``YYYYMMDD`` if exists, else ``None` +``{{ next_ds_nodash }}`` the next execution date as ``YYYYMMDD`` if exists, else ``None`` ``{{ yesterday_ds }}`` the day before the execution date as ``YYYY-MM-DD`` ``{{ yesterday_ds_nodash }}`` the day before the execution date as ``YYYYMMDD`` ``{{ tomorrow_ds }}`` the day after the execution date as ``YYYY-MM-DD`` diff --git a/docs/conf.py b/docs/conf.py index cf31d5e57ded5..dbd5393f05a4d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -224,7 +224,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied diff --git a/docs/howto/manage-connections.rst b/docs/howto/manage-connections.rst index dacefc41ec247..25bd1492ef3b4 100644 --- a/docs/howto/manage-connections.rst +++ b/docs/howto/manage-connections.rst @@ -115,16 +115,17 @@ The following connection IDs are used by default. 
``google_cloud_default`` Used by those hooks: -* :class:`~airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook` -* :class:`~airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook` -* :class:`~airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook` -* :class:`~airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook` -* :class:`~airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook` -* :class:`~airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook` -* :class:`~airflow.contrib.hooks.gcp_compute_hook.GceHook` -* :class:`~airflow.contrib.hooks.gcp_function_hook.GcfHook` -* :class:`~airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook` -* :class:`~airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook` + + * :class:`~airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook` + * :class:`~airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook` + * :class:`~airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook` + * :class:`~airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook` + * :class:`~airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook` + * :class:`~airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook` + * :class:`~airflow.contrib.hooks.gcp_compute_hook.GceHook` + * :class:`~airflow.contrib.hooks.gcp_function_hook.GcfHook` + * :class:`~airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook` + * :class:`~airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook` Configuring the Connection diff --git a/docs/howto/operator.rst b/docs/howto/operator.rst index 59656beb4302c..686afd1b05907 100644 --- a/docs/howto/operator.rst +++ b/docs/howto/operator.rst @@ -174,7 +174,7 @@ Templating More information """""""""""""""" -See `Google Compute Engine API documentation +See `Google Compute Engine API documentation for start `_. @@ -228,7 +228,7 @@ Templating More information """""""""""""""" -See `Google Compute Engine API documentation +See `Google Compute Engine API documentation for stop `_. 
@@ -288,7 +288,7 @@ Templating More information """""""""""""""" -See `Google Compute Engine API documentation +See `Google Compute Engine API documentation for setMachineType `_. @@ -348,7 +348,7 @@ Templating More information """""""""""""""" -See `Google Compute Engine API documentation +See `Google Compute Engine API documentation for instanceTemplates `_. GceInstanceGroupManagerUpdateTemplateOperator @@ -415,14 +415,14 @@ the permissions that theService Account User role provides More information """""""""""""""" -See `Google Compute Engine API documentation +See `Google Compute Engine API documentation for instanceGroupManagers `_. Google Cloud Bigtable Operators ------------------------------- Arguments -""""""""" +^^^^^^^^^ All examples below rely on the following variables, which can be passed via environment variables. @@ -604,7 +604,7 @@ Templating More information """""""""""""""" -See `Google Cloud Functions API documentation +See `Google Cloud Functions API documentation for delete `_. GcfFunctionDeployOperator @@ -736,7 +736,7 @@ can be downloaded if necessary. More information """""""""""""""" -See `Google Cloud Functions API documentation +See `Google Cloud Functions API documentation for create `_. Google Cloud Spanner Operators @@ -1193,8 +1193,9 @@ CloudSqlInstanceExportOperator Exports data from a Cloud SQL instance to a Cloud Storage bucket as a SQL dump or CSV file. -Note: This operator is idempotent. If executed multiple times with the same -export file URI, the export file in GCS will simply be overridden. +.. note:: + This operator is idempotent. If executed multiple times with the same + export file URI, the export file in GCS will simply be overridden. For parameter definition take a look at :class:`~airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator`. 
diff --git a/docs/integration.rst b/docs/integration.rst index fbe4accac6332..797286cbe987d 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -90,8 +90,9 @@ middleware in your `airflow.cfg`:: enable_proxy_fix = True -Note: you should only enable the `ProxyFix` middleware when running -Airflow behind a trusted proxy (AWS ELB, nginx, etc.). +.. note:: + You should only enable the `ProxyFix` middleware when running + Airflow behind a trusted proxy (AWS ELB, nginx, etc.). .. _Azure: @@ -195,12 +196,16 @@ AzureCosmosDBHook .. autoclass:: airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook :noindex: +.. _AzureCosmosInsertDocumentOperator: + AzureCosmosInsertDocumentOperator """"""""""""""""""""""""""""""""" .. autoclass:: airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator :noindex: +.. _AzureCosmosDocumentSensor: + AzureCosmosDocumentSensor """"""""""""""""""""""""" @@ -957,7 +962,7 @@ Cloud Bigtable Operators .. _BigtableInstanceCreateOperator: BigtableInstanceCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator :noindex: diff --git a/docs/plugins.rst b/docs/plugins.rst index aca36b070ea49..a4913dce27537 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -228,7 +228,7 @@ the fields appbuilder_views and appbuilder_menu_items were added to the AirflowT Plugins as Python packages -------------------------- -It is possible to load plugins via `setuptools' entrypoint`_ mechanism. To do this link +It is possible to load plugins via `setuptools entrypoint `_ mechanism. To do this link your plugin using an entrypoint in your package. If the package is installed, airflow will automatically load the registered plugins from the entrypoint list. 
From 61fb776c3bb238bddbc668d743e1ce0bc66b5a06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Sun, 27 Jan 2019 18:20:59 +0100 Subject: [PATCH 0020/1104] [AIRFLOW-XXX] Remove profiling link (#4602) --- docs/index.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index a5f0c4503a630..97a5107fa0eea 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -80,7 +80,6 @@ Content howto/index ui concepts - profiling cli scheduler plugins From fa21d68d3c6015b216c1717333678b84a41d94bc Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Sun, 27 Jan 2019 09:22:06 -0800 Subject: [PATCH 0021/1104] [AIRFLOW-3773] Fix /refresh_all endpoint (#4597) * [AIRFLOW-3773] Fix /refresh_all endpoint Call `sync_perm_for_dag` for each DAG in the DagBag (`dag_id` is a required argument). I looked for a test suite for the web UI, but it seems the existing tests have all been disabled since the switch to FAB. I've created a new class for FAB tests and added a test to exercise this `/refresh_all` endpoint. * Move tests to www/test_views.py I didn't realize that we already had test scaffolding in place for testing the FAB-based UI. 
--- airflow/www/views.py | 5 +++-- tests/www/test_views.py | 5 +++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/airflow/www/views.py b/airflow/www/views.py index f2a440a63ff2b..63e88fa57fdf2 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -1687,8 +1687,9 @@ def refresh(self, session=None): @action_logging def refresh_all(self): dagbag.collect_dags(only_if_updated=False) - # sync permissions for all dags - appbuilder.sm.sync_perm_for_dag() + for dag_id in dagbag.dags: + # sync permissions for all dags + appbuilder.sm.sync_perm_for_dag(dag_id) flash("All DAGs are now up to date") return redirect('/') diff --git a/tests/www/test_views.py b/tests/www/test_views.py index ac5a3e42f3f14..700c06924bcf6 100644 --- a/tests/www/test_views.py +++ b/tests/www/test_views.py @@ -518,6 +518,11 @@ def test_refresh(self): resp = self.client.get('refresh?dag_id=example_bash_operator') self.check_content_in_response('', resp, resp_code=302) + def test_refresh_all(self): + resp = self.client.get("/refresh_all", + follow_redirects=True) + self.check_content_in_response('', resp, resp_code=200) + def test_delete_dag_button_normal(self): resp = self.client.get('/', follow_redirects=True) self.check_content_in_response('/delete?dag_id=example_bash_operator', resp) From 8f3982f8152d225bfbc063daa722e8447f0fa1a2 Mon Sep 17 00:00:00 2001 From: Sid Anand Date: Sun, 27 Jan 2019 14:03:09 -0800 Subject: [PATCH 0022/1104] [AIRFLOW-XXX] Add Tinder to the companies list (#4604) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 92b50a68833b7..7caa2d3490755 100644 --- a/README.md +++ b/README.md @@ -320,6 +320,7 @@ Currently **officially** using Airflow: 1. [Thumbtack](https://www.thumbtack.com/) [[@natekupp](https://github.com/natekupp)] 1. [Tictail](https://tictail.com/) 1. [Tile](https://tile.com/) [[@ranjanmanish](https://github.com/ranjanmanish)] +1. 
[Tinder](https://tinder.com/) [[@kbendick](https://github.com/kbendick)] 1. [Tokopedia](https://www.tokopedia.com/) [@topedmaria](https://github.com/topedmaria) 1. [Twine Labs](https://www.twinelabs.com/) [[@ivorpeles](https://github.com/ivorpeles)] 1. [Twitter](https://www.twitter.com/) [[@aoen](https://github.com/aoen)] From dfffd7a0e984413c1feed9643841d3b1007c51af Mon Sep 17 00:00:00 2001 From: Anoop Kunjuraman Date: Sun, 27 Jan 2019 23:02:49 -0600 Subject: [PATCH 0023/1104] [AIRFLOW-XXX] Add Capital One to the companies list (#4606) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7caa2d3490755..0893bcb2a0518 100644 --- a/README.md +++ b/README.md @@ -146,6 +146,7 @@ Currently **officially** using Airflow: 1. [BounceX](http://www.bouncex.com) [[@JoshFerge](https://github.com/JoshFerge), [@hudsonrio](https://github.com/hudsonrio), [@ronniekritou](https://github.com/ronniekritou)] 1. [Branch](https://branch.io) [[@sdebarshi](https://github.com/sdebarshi), [@dmitrig01](https://github.com/dmitrig01)] 1. [California Data Collaborative](https://github.com/California-Data-Collaborative) powered by [ARGO Labs](http://www.argolabs.org) +1. [Capital One](https://www.capitalone.com) [[@anoopengineer](https://github.com/anoopengineer)] 1. [Carbonite](https://www.carbonite.com) [[@ajbosco](https://github.com/ajbosco)] 1. [CarLabs](https://www.carlabs.ai/) [[@sganz](https://github.com/sganz) & [@odannyc](https://github.com/odannyc)] 1. 
[CAVA](https://www.cava.com) [[@minh5](http://github.com/minh5) & [@patchus](http://github.com/patchus)] From 03ec4181f3d0d9e50c99ecedb331412e890878a9 Mon Sep 17 00:00:00 2001 From: Ryan Yuan Date: Tue, 29 Jan 2019 08:38:10 +1100 Subject: [PATCH 0024/1104] [AIRFLOW-3762] Add list_jobs to CLI (#4579) * [AIRFLOW-3762] Add list_jobs to CLI Add list_jobs to CLI * [AIRFLOW-3762] Add list_jobs to CLI Improve test_cli_list_jobs_with_args * [AIRFLOW-3762] Add list_jobs to CLI Directly parse args.limit to list_jobs query * [AIRFLOW-3762] Add list_jobs to CLI Format list_jobs code --- airflow/bin/cli.py | 43 +++++++++++++++++++++++++++++++++++++++++++ tests/core.py | 11 +++++++++++ 2 files changed, 54 insertions(+) diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index c15d0212b5a14..df98e4d83510e 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -636,6 +636,39 @@ def list_tasks(args, dag=None): print("\n".join(sorted(tasks))) +@cli_utils.action_logging +def list_jobs(args, dag=None): + queries = [] + if dag: + args.dag_id = dag.dag_id + if args.dag_id: + dagbag = DagBag() + + if args.dag_id not in dagbag.dags: + error_message = "Dag id {} not found".format(args.dag_id) + raise AirflowException(error_message) + queries.append(jobs.BaseJob.dag_id == args.dag_id) + + if args.state: + queries.append(jobs.BaseJob.state == args.state) + + with db.create_session() as session: + all_jobs = (session + .query(jobs.BaseJob) + .filter(*queries) + .order_by(jobs.BaseJob.start_date.desc()) + .limit(args.limit) + .all()) + fields = ['dag_id', 'state', 'job_type', 'start_date', 'end_date'] + all_jobs = [[job.__getattribute__(field) for field in fields] for job in all_jobs] + msg = tabulate(all_jobs, + [field.capitalize().replace('_', ' ') for field in fields], + tablefmt="fancy_grid") + if sys.version_info[0] < 3: + msg = msg.encode('utf-8') + print(msg) + + @cli_utils.action_logging def test(args, dag=None): # We want log outout from operators etc to show up here. 
Normally @@ -1560,6 +1593,12 @@ class CLIFactory(object): "Only list the dag runs corresponding to the state" ), + # list_jobs + 'limit': Arg( + ("--limit",), + "Return a limited number of records" + ), + # backfill 'mark_success': Arg( ("-m", "--mark_success"), @@ -1995,6 +2034,10 @@ class CLIFactory(object): 'func': list_tasks, 'help': "List the tasks within a DAG", 'args': ('dag_id', 'tree', 'subdir'), + }, { + 'func': list_jobs, + 'help': "List the jobs", + 'args': ('dag_id_opt', 'state', 'limit'), }, { 'func': clear, 'help': "Clear a set of task instance, as if they never ran", diff --git a/tests/core.py b/tests/core.py index 35ddbeccc0648..ca70df92c68d5 100644 --- a/tests/core.py +++ b/tests/core.py @@ -1227,6 +1227,17 @@ def test_cli_list_tasks(self): 'list_tasks', 'example_bash_operator', '--tree']) cli.list_tasks(args) + def test_cli_list_jobs(self): + args = self.parser.parse_args(['list_jobs']) + cli.list_jobs(args) + + def test_cli_list_jobs_with_args(self): + args = self.parser.parse_args(['list_jobs', '--dag_id', + 'example_bash_operator', + '--state', 'success', + '--limit', '100']) + cli.list_jobs(args) + @mock.patch("airflow.bin.cli.db.initdb") def test_cli_initdb(self, initdb_mock): cli.initdb(self.parser.parse_args(['initdb'])) From 43e00102719b2a3529adee17e245346e5d094cb8 Mon Sep 17 00:00:00 2001 From: andyh1203 Date: Tue, 29 Jan 2019 02:19:53 -0800 Subject: [PATCH 0025/1104] [AIRFLOW-3474] Move SlaMiss out of models.py (#4608) --- airflow/jobs.py | 4 ++-- airflow/models/__init__.py | 25 -------------------- airflow/models/slamiss.py | 48 ++++++++++++++++++++++++++++++++++++++ airflow/www/views.py | 3 ++- tests/test_jobs.py | 23 +++++++++--------- 5 files changed, 64 insertions(+), 39 deletions(-) create mode 100644 airflow/models/slamiss.py diff --git a/airflow/jobs.py b/airflow/jobs.py index 7832adcf64b95..86eaf80d47c09 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -44,6 +44,7 @@ from airflow.exceptions import AirflowException from 
airflow.models import DAG, DagRun, errors from airflow.models.dagpickle import DagPickle +from airflow.models.slamiss import SlaMiss from airflow.settings import Stats from airflow.task.task_runner import get_task_runner from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, RUN_DEPS @@ -644,7 +645,6 @@ def manage_slas(self, dag, session=None): ).all() ts = timezone.utcnow() - SlaMiss = models.SlaMiss for ti in max_tis: task = dag.get_task(ti.task_id) dttm = ti.execution_date @@ -653,7 +653,7 @@ def manage_slas(self, dag, session=None): while dttm < timezone.utcnow(): following_schedule = dag.following_schedule(dttm) if following_schedule + task.sla < timezone.utcnow(): - session.merge(models.SlaMiss( + session.merge(SlaMiss( task_id=ti.task_id, dag_id=ti.dag_id, execution_date=dttm, diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 60f8cdcedba86..336b06f761340 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -5078,31 +5078,6 @@ def open_slots(self, session): return self.slots - used_slots - queued_slots -class SlaMiss(Base): - """ - Model that stores a history of the SLA that have been missed. - It is used to keep track of SLA failures over time and to avoid double - triggering alert emails. 
- """ - __tablename__ = "sla_miss" - - task_id = Column(String(ID_LEN), primary_key=True) - dag_id = Column(String(ID_LEN), primary_key=True) - execution_date = Column(UtcDateTime, primary_key=True) - email_sent = Column(Boolean, default=False) - timestamp = Column(UtcDateTime) - description = Column(Text) - notification_sent = Column(Boolean, default=False) - - __table_args__ = ( - Index('sm_dag', dag_id, unique=False), - ) - - def __repr__(self): - return str(( - self.dag_id, self.task_id, self.execution_date.isoformat())) - - class KubeResourceVersion(Base): __tablename__ = "kube_resource_version" one_row_id = Column(Boolean, server_default=sqltrue(), primary_key=True) diff --git a/airflow/models/slamiss.py b/airflow/models/slamiss.py new file mode 100644 index 0000000000000..fd1e13ad310ca --- /dev/null +++ b/airflow/models/slamiss.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from sqlalchemy import Boolean, Column, String, Index, Text + +from airflow.models import Base, ID_LEN +from airflow.utils.sqlalchemy import UtcDateTime + + +class SlaMiss(Base): + """ + Model that stores a history of the SLA that have been missed. 
+ It is used to keep track of SLA failures over time and to avoid double + triggering alert emails. + """ + __tablename__ = "sla_miss" + + task_id = Column(String(ID_LEN), primary_key=True) + dag_id = Column(String(ID_LEN), primary_key=True) + execution_date = Column(UtcDateTime, primary_key=True) + email_sent = Column(Boolean, default=False) + timestamp = Column(UtcDateTime) + description = Column(Text) + notification_sent = Column(Boolean, default=False) + + __table_args__ = ( + Index('sm_dag', dag_id, unique=False), + ) + + def __repr__(self): + return str(( + self.dag_id, self.task_id, self.execution_date.isoformat())) diff --git a/airflow/www/views.py b/airflow/www/views.py index 63e88fa57fdf2..000c6f479208c 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -56,6 +56,7 @@ set_dag_run_state_to_failed) from airflow.models import XCom, DagRun, errors from airflow.models.connection import Connection +from airflow.models.slamiss import SlaMiss from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, SCHEDULER_DEPS from airflow.utils import timezone from airflow.utils.dates import infer_time_unit, scale_time_units @@ -1885,7 +1886,7 @@ class AirflowModelView(ModelView): class SlaMissModelView(AirflowModelView): route_base = '/slamiss' - datamodel = AirflowModelView.CustomSQLAInterface(models.SlaMiss) + datamodel = AirflowModelView.CustomSQLAInterface(SlaMiss) base_permissions = ['can_list'] diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 4c8e149a925de..39f6a247314bd 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -48,6 +48,7 @@ from airflow.jobs import BaseJob, BackfillJob, SchedulerJob, LocalTaskJob from airflow.models import DAG, DagModel, DagBag, DagRun, Pool, TaskInstance as TI, \ errors +from airflow.models.slamiss import SlaMiss from airflow.operators.bash_operator import BashOperator from airflow.operators.dummy_operator import DummyOperator from airflow.task.task_runner.base_task_runner import BaseTaskRunner @@ 
-2974,11 +2975,11 @@ def test_scheduler_sla_miss_callback(self): state='success')) # Create an SlaMiss where notification was sent, but email was not - session.merge(models.SlaMiss(task_id='dummy', - dag_id='test_sla_miss', - execution_date=test_start_date, - email_sent=False, - notification_sent=True)) + session.merge(SlaMiss(task_id='dummy', + dag_id='test_sla_miss', + execution_date=test_start_date, + email_sent=False, + notification_sent=True)) # Now call manage_slas and see if the sla_miss callback gets called scheduler = SchedulerJob(dag_id='test_sla_miss', @@ -3011,9 +3012,9 @@ def test_scheduler_sla_miss_callback_exception(self): state='Success')) # Create an SlaMiss where notification was sent, but email was not - session.merge(models.SlaMiss(task_id='dummy', - dag_id='test_sla_miss', - execution_date=test_start_date)) + session.merge(SlaMiss(task_id='dummy', + dag_id='test_sla_miss', + execution_date=test_start_date)) # Now call manage_slas and see if the sla_miss callback gets called scheduler = SchedulerJob(dag_id='test_sla_miss') @@ -3053,9 +3054,9 @@ def test_scheduler_sla_miss_email_exception(self, mock_send_email): state='Success')) # Create an SlaMiss where notification was sent, but email was not - session.merge(models.SlaMiss(task_id='dummy', - dag_id='test_sla_miss', - execution_date=test_start_date)) + session.merge(SlaMiss(task_id='dummy', + dag_id='test_sla_miss', + execution_date=test_start_date)) scheduler = SchedulerJob(dag_id='test_sla_miss', num_runs=1) From 9f6e5463f85e5c55253445c43ab50a913cea0067 Mon Sep 17 00:00:00 2001 From: OmerJog <44576851+OmerJog@users.noreply.github.com> Date: Tue, 29 Jan 2019 12:23:40 +0200 Subject: [PATCH 0026/1104] [AIRFLOW-865] Configure FTP connection mode (#4535) --- airflow/contrib/hooks/ftp_hook.py | 9 ++++-- tests/contrib/hooks/test_ftp_hook.py | 45 ++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/airflow/contrib/hooks/ftp_hook.py 
b/airflow/contrib/hooks/ftp_hook.py index d67ebf82bc4c6..7d3cbd12320b7 100644 --- a/airflow/contrib/hooks/ftp_hook.py +++ b/airflow/contrib/hooks/ftp_hook.py @@ -64,8 +64,9 @@ class FTPHook(BaseHook, LoggingMixin): """ Interact with FTP. - Errors that may occur throughout but should be handled - downstream. + Errors that may occur throughout but should be handled downstream. + You can specify mode for data transfers in the extra field of your + connection as ``{"passive": "true"}``. """ def __init__(self, ftp_conn_id='ftp_default'): @@ -85,7 +86,9 @@ def get_conn(self): """ if self.conn is None: params = self.get_connection(self.ftp_conn_id) + pasv = params.extra_dejson.get("passive", True) self.conn = ftplib.FTP(params.host, params.login, params.password) + self.conn.set_pasv(pasv) return self.conn @@ -305,6 +308,7 @@ def get_conn(self): """ if self.conn is None: params = self.get_connection(self.ftp_conn_id) + pasv = params.extra_dejson.get("passive", True) if params.port: ftplib.FTP_TLS.port = params.port @@ -312,5 +316,6 @@ def get_conn(self): self.conn = ftplib.FTP_TLS( params.host, params.login, params.password ) + self.conn.set_pasv(pasv) return self.conn diff --git a/tests/contrib/hooks/test_ftp_hook.py b/tests/contrib/hooks/test_ftp_hook.py index 1274990827096..10f02ba526418 100644 --- a/tests/contrib/hooks/test_ftp_hook.py +++ b/tests/contrib/hooks/test_ftp_hook.py @@ -125,5 +125,50 @@ def test_retrieve_file_with_callback(self): self.conn_mock.retrbinary.assert_called_once_with('RETR path', func) +class TestIntegrationFTPHook(unittest.TestCase): + + def setUp(self): + super(TestIntegrationFTPHook, self).setUp() + from airflow import configuration + from airflow.utils import db + from airflow.models.connection import Connection + + configuration.load_test_config() + db.merge_conn( + Connection( + conn_id='ftp_passive', conn_type='ftp', + host='localhost', extra='{"passive": true}')) + + db.merge_conn( + Connection( + conn_id='ftp_active', conn_type='ftp', 
+ host='localhost', extra='{"passive": false}')) + + def _test_mode(self, hook_type, connection_id, expected_mode): + hook = hook_type(connection_id) + conn = hook.get_conn() + conn.set_pasv.assert_called_with(expected_mode) + + @mock.patch("ftplib.FTP") + def test_ftp_passive_mode(self, ftp_mock): + from airflow.contrib.hooks.ftp_hook import FTPHook + self._test_mode(FTPHook, "ftp_passive", True) + + @mock.patch("ftplib.FTP") + def test_ftp_active_mode(self, ftp_mock): + from airflow.contrib.hooks.ftp_hook import FTPHook + self._test_mode(FTPHook, "ftp_active", False) + + @mock.patch("ftplib.FTP_TLS") + def test_ftps_passive_mode(self, ftps_mock): + from airflow.contrib.hooks.ftp_hook import FTPSHook + self._test_mode(FTPSHook, "ftp_passive", True) + + @mock.patch("ftplib.FTP_TLS") + def test_ftps_active_mode(self, ftps_mock): + from airflow.contrib.hooks.ftp_hook import FTPSHook + self._test_mode(FTPSHook, "ftp_active", False) + + if __name__ == '__main__': unittest.main() From fa09df570759a4cb8052c7e4dc0937d9f190a0f0 Mon Sep 17 00:00:00 2001 From: zhongjiajie Date: Tue, 29 Jan 2019 19:49:06 +0800 Subject: [PATCH 0027/1104] [AIRFLOW-3734] Fix hql not run when partition is None (#4561) --- airflow/hooks/hive_hooks.py | 4 ++-- tests/hooks/test_hive_hook.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py index 4c5788b874e34..224a9e163fb1f 100644 --- a/airflow/hooks/hive_hooks.py +++ b/airflow/hooks/hive_hooks.py @@ -451,11 +451,11 @@ def load_file( if partition: pvals = ", ".join( ["{0}='{1}'".format(k, v) for k, v in partition.items()]) - hql += "PARTITION ({pvals});" + hql += "PARTITION ({pvals})" # As a workaround for HIVE-10541, add a newline character # at the end of hql (AIRFLOW-2412). 
- hql += '\n' + hql += ';\n' hql = hql.format(**locals()) self.log.info(hql) diff --git a/tests/hooks/test_hive_hook.py b/tests/hooks/test_hive_hook.py index 22ccb28d782e3..24bed195994d1 100644 --- a/tests/hooks/test_hive_hook.py +++ b/tests/hooks/test_hive_hook.py @@ -138,7 +138,7 @@ def test_load_file(self, mock_run_cli): query = ( "LOAD DATA LOCAL INPATH '{filepath}' " - "OVERWRITE INTO TABLE {table} \n" + "OVERWRITE INTO TABLE {table} ;\n" .format(filepath=filepath, table=table) ) mock_run_cli.assert_called_with(query) From cd9d543b45366782cea6e46f2912102098b07d6b Mon Sep 17 00:00:00 2001 From: Felix Date: Tue, 29 Jan 2019 16:05:24 +0100 Subject: [PATCH 0028/1104] [AIRFLOW-3552] Add ImapToS3TransferOperator (#4476) NOTE: This operator only transfers the latest attachment by name. --- .../imap_attachment_to_s3_operator.py | 88 +++++++++++++++++++ docs/code.rst | 1 + .../test_imap_attachment_to_s3_operator.py | 57 ++++++++++++ 3 files changed, 146 insertions(+) create mode 100644 airflow/contrib/operators/imap_attachment_to_s3_operator.py create mode 100644 tests/contrib/operators/test_imap_attachment_to_s3_operator.py diff --git a/airflow/contrib/operators/imap_attachment_to_s3_operator.py b/airflow/contrib/operators/imap_attachment_to_s3_operator.py new file mode 100644 index 0000000000000..6126968c9c761 --- /dev/null +++ b/airflow/contrib/operators/imap_attachment_to_s3_operator.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from airflow.contrib.hooks.imap_hook import ImapHook +from airflow.hooks.S3_hook import S3Hook +from airflow.models import BaseOperator +from airflow.utils.decorators import apply_defaults + + +class ImapAttachmentToS3Operator(BaseOperator): + """ + Transfers a mail attachment from a mail server into s3 bucket. + + :param imap_attachment_name: The file name of the mail attachment that you want to transfer. + :type imap_attachment_name: str + :param s3_key: The destination file name in the s3 bucket for the attachment. + :type s3_key: str + :param imap_mail_folder: The folder on the mail server to look for the attachment. + :type imap_mail_folder: str + :param imap_check_regex: If set checks the `imap_attachment_name` for a regular expression. + :type imap_check_regex: bool + :param s3_overwrite: If set overwrites the s3 key if already exists. + :type s3_overwrite: bool + :param imap_conn_id: The reference to the connection details of the mail server. + :type imap_conn_id: str + :param s3_conn_id: The reference to the s3 connection details. 
+ :type s3_conn_id: str + """ + template_fields = ('imap_attachment_name', 's3_key') + + @apply_defaults + def __init__(self, + imap_attachment_name, + s3_key, + imap_mail_folder='INBOX', + imap_check_regex=False, + s3_overwrite=False, + imap_conn_id='imap_default', + s3_conn_id='aws_default', + *args, + **kwargs): + super(ImapAttachmentToS3Operator, self).__init__(*args, **kwargs) + self.imap_attachment_name = imap_attachment_name + self.s3_key = s3_key + self.imap_mail_folder = imap_mail_folder + self.imap_check_regex = imap_check_regex + self.s3_overwrite = s3_overwrite + self.imap_conn_id = imap_conn_id + self.s3_conn_id = s3_conn_id + + def execute(self, context): + """ + This function executes the transfer from the email server (via imap) into s3. + + :param context: The context while executing. + :type context: dict + """ + self.log.info( + 'Transferring mail attachment %s from mail server via imap to s3 key %s...', + self.imap_attachment_name, self.s3_key + ) + + with ImapHook(imap_conn_id=self.imap_conn_id) as imap_hook: + imap_mail_attachments = imap_hook.retrieve_mail_attachments( + name=self.imap_attachment_name, + mail_folder=self.imap_mail_folder, + check_regex=self.imap_check_regex, + latest_only=True + ) + + s3_hook = S3Hook(aws_conn_id=self.s3_conn_id) + s3_hook.load_string(string_data=imap_mail_attachments[0][1], key=self.s3_key) diff --git a/docs/code.rst b/docs/code.rst index e2754d3fa4693..9fd0c3fb34d04 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -217,6 +217,7 @@ Operators .. autoclass:: airflow.contrib.operators.hipchat_operator.HipChatAPIOperator .. autoclass:: airflow.contrib.operators.hipchat_operator.HipChatAPISendRoomNotificationOperator .. autoclass:: airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator +.. autoclass:: airflow.contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator .. autoclass:: airflow.contrib.operators.jenkins_job_trigger_operator.JenkinsJobTriggerOperator .. 
autoclass:: airflow.contrib.operators.jira_operator.JiraOperator .. autoclass:: airflow.contrib.operators.kubernetes_pod_operator.KubernetesPodOperator diff --git a/tests/contrib/operators/test_imap_attachment_to_s3_operator.py b/tests/contrib/operators/test_imap_attachment_to_s3_operator.py new file mode 100644 index 0000000000000..0cde76cc94f1a --- /dev/null +++ b/tests/contrib/operators/test_imap_attachment_to_s3_operator.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest + +from mock import patch + +from airflow.contrib.operators.imap_attachment_to_s3_operator import ImapAttachmentToS3Operator + + +class TestImapAttachmentToS3Operator(unittest.TestCase): + + def setUp(self): + self.kwargs = dict( + imap_attachment_name='test_file', + s3_key='test_file', + imap_mail_folder='INBOX', + imap_check_regex=False, + s3_overwrite=False, + task_id='test_task', + dag=None + ) + + @patch('airflow.contrib.operators.imap_attachment_to_s3_operator.S3Hook') + @patch('airflow.contrib.operators.imap_attachment_to_s3_operator.ImapHook') + def test_execute(self, mock_imap_hook, mock_s3_hook): + mock_imap_hook.return_value.__enter__ = mock_imap_hook + mock_imap_hook.return_value.retrieve_mail_attachments.return_value = [('test_file', b'Hello World')] + + ImapAttachmentToS3Operator(**self.kwargs).execute(context={}) + + mock_imap_hook.return_value.retrieve_mail_attachments.assert_called_once_with( + name=self.kwargs['imap_attachment_name'], + mail_folder=self.kwargs['imap_mail_folder'], + check_regex=self.kwargs['imap_check_regex'], + latest_only=True + ) + mock_s3_hook.return_value.load_string.assert_called_once_with( + string_data=mock_imap_hook.return_value.retrieve_mail_attachments.return_value[0][1], + key=self.kwargs['s3_key'] + ) From fc22c6efad41a70e3d4ebd08e1b50e040505211d Mon Sep 17 00:00:00 2001 From: Tao Feng Date: Tue, 29 Jan 2019 11:55:13 -0800 Subject: [PATCH 0029/1104] [AIRFLOW-XXX] Update timezone doc (#4592) --- docs/timezone.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/timezone.rst b/docs/timezone.rst index 3ee7346b56e59..85adb3b8e87fc 100644 --- a/docs/timezone.rst +++ b/docs/timezone.rst @@ -49,7 +49,7 @@ Python’s datetime.datetime objects have a tzinfo attribute that can be used to represented as an instance of a subclass of datetime.tzinfo. When this attribute is set and describes an offset, a datetime object is aware. Otherwise, it’s naive. 
-You can use timezone.is_aware() and timezone.is_naive() to determine whether datetimes are aware or naive. +You can use timezone.is_localized() and timezone.is_naive() to determine whether datetimes are aware or naive. Because Airflow uses time-zone-aware datetime objects. If your code creates datetime objects they need to be aware too. From 0d64fd8aac7b93f4143d8f1d3baa4a0e239aba55 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Tue, 29 Jan 2019 19:56:08 +0000 Subject: [PATCH 0030/1104] [AIRFLOW-3742] Respect the `fallback` arg in airflow.configuration.get (#4567) This argument is part of the API from our parent class, but we didn't support it because of the various steps we perform in `get()` - this makes it behave more like the parent class, and can simplify a few instances in our code (I've only included one that I found here) --- airflow/configuration.py | 9 +++++---- airflow/settings.py | 16 +++++----------- tests/cli/test_worker_initialisation.py | 6 ++++-- tests/test_configuration.py | 6 ++++++ 4 files changed, 20 insertions(+), 17 deletions(-) diff --git a/airflow/configuration.py b/airflow/configuration.py index 70203975446b2..1de5c3aeb1354 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -36,7 +36,7 @@ import sys import warnings -from backports.configparser import ConfigParser +from backports.configparser import ConfigParser, _UNSET, NoOptionError from zope.deprecation import deprecated from airflow.exceptions import AirflowConfigException @@ -247,7 +247,7 @@ def get(self, section, key, **kwargs): return option # ...then the default config - if self.airflow_defaults.has_option(section, key): + if self.airflow_defaults.has_option(section, key) or 'fallback' in kwargs: return expand_env_var( self.airflow_defaults.get(section, key, **kwargs)) @@ -291,9 +291,10 @@ def has_option(self, section, option): try: # Using self.get() to avoid reimplementing the priority order # of config variables (env, config, cmd, defaults) - 
self.get(section, option) + # UNSET to avoid logging a warning about missing values + self.get(section, option, fallback=_UNSET) return True - except AirflowConfigException: + except NoOptionError: return False def remove_option(self, section, option, remove_default=True): diff --git a/airflow/settings.py b/airflow/settings.py index 4a5d99d17b279..035b0ff0e80fb 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -176,13 +176,10 @@ def configure_orm(disable_connection_pool=False): engine_args['pool_size'] = pool_size engine_args['pool_recycle'] = pool_recycle - try: - # Allow the user to specify an encoding for their DB otherwise default - # to utf-8 so jobs & users with non-latin1 characters can still use - # us. - engine_args['encoding'] = conf.get('core', 'SQL_ENGINE_ENCODING') - except conf.AirflowConfigException: - engine_args['encoding'] = 'utf-8' + # Allow the user to specify an encoding for their DB otherwise default + # to utf-8 so jobs & users with non-latin1 characters can still use + # us. 
+ engine_args['encoding'] = conf.get('core', 'SQL_ENGINE_ENCODING', fallback='utf-8') # For Python2 we get back a newstr and need a str engine_args['encoding'] = engine_args['encoding'].__str__() @@ -226,10 +223,7 @@ def configure_adapters(): def validate_session(): - try: - worker_precheck = conf.getboolean('core', 'worker_precheck') - except conf.AirflowConfigException: - worker_precheck = False + worker_precheck = conf.getboolean('core', 'worker_precheck', fallback=False) if not worker_precheck: return True else: diff --git a/tests/cli/test_worker_initialisation.py b/tests/cli/test_worker_initialisation.py index 6397986e8a84e..8baa787a61b37 100644 --- a/tests/cli/test_worker_initialisation.py +++ b/tests/cli/test_worker_initialisation.py @@ -59,8 +59,10 @@ def test_worker_precheck_exception(self, mock_getboolean): Test to check the behaviour of validate_session method when worker_precheck is absent in airflow configuration """ - mock_getboolean.side_effect = airflow.configuration.AirflowConfigException - self.assertEqual(airflow.settings.validate_session(), True) + mock_getboolean.return_value = False + + self.assertTrue(airflow.settings.validate_session()) + mock_getboolean.assert_called_once_with('core', 'worker_precheck', fallback=False) @mock.patch('sqlalchemy.orm.session.Session.execute') @mock.patch('airflow.configuration.getboolean') diff --git a/tests/test_configuration.py b/tests/test_configuration.py index dba9891cbefb4..ba160f768bcc9 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -98,6 +98,8 @@ def test_env_var_config(self): opt = conf.get('testsection', 'testpercent') self.assertEqual(opt, 'with%percent') + self.assertTrue(conf.has_option('testsection', 'testkey')) + def test_conf_as_dict(self): cfg_dict = conf.as_dict() @@ -165,6 +167,10 @@ def test_command_config(self): self.assertEqual('key4_result', test_conf.get('test', 'key4')) self.assertEqual('value6', test_conf.get('another', 'key6')) + 
self.assertEqual('hello', test_conf.get('test', 'key1', fallback='fb')) + self.assertEqual('value6', test_conf.get('another', 'key6', fallback='fb')) + self.assertEqual('fb', test_conf.get('another', 'key7', fallback='fb')) + self.assertTrue(test_conf.has_option('test', 'key1')) self.assertTrue(test_conf.has_option('test', 'key2')) self.assertTrue(test_conf.has_option('test', 'key3')) From 2f688f69cb5f59db888041228dfb4a2ae06e5644 Mon Sep 17 00:00:00 2001 From: Stefan Seelmann Date: Tue, 29 Jan 2019 22:44:10 +0100 Subject: [PATCH 0031/1104] AIRFLOW-3590: Change log message of executor exit status (#4616) Try to make the log message clearer in the presence of rescheduled tasks - i.e that the task exited with 0/1, not the status of the task, without having each executor having to know about reschedule or other states we might introduce. --- airflow/jobs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/airflow/jobs.py b/airflow/jobs.py index 86eaf80d47c09..5949950d00cf9 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -1451,7 +1451,8 @@ def _process_executor_events(self, simple_dag_bag, session=None): .items()): dag_id, task_id, execution_date, try_number = key self.log.info( - "Executor reports %s.%s execution_date=%s as %s for try_number %s", + "Executor reports execution of %s.%s execution_date=%s " + "exited with status %s for try_number %s", dag_id, task_id, execution_date, state, try_number ) if state == State.FAILED or state == State.SUCCESS: From 26d775aa207e065ffa04ba43c0a8940ee9ec32ed Mon Sep 17 00:00:00 2001 From: Joshua Carp Date: Wed, 30 Jan 2019 11:52:19 -0500 Subject: [PATCH 0032/1104] [AIRFLOW-3789] Fix flake8 3.7 errors. 
(#4617) --- .../airflow_local_settings.py | 8 +-- airflow/contrib/hooks/gcp_dataflow_hook.py | 2 +- airflow/contrib/hooks/imap_hook.py | 6 +- .../pod_request_factory.py | 1 + airflow/contrib/kubernetes/pod_launcher.py | 5 +- .../contrib/operators/mlengine_operator.py | 2 +- airflow/contrib/operators/ssh_operator.py | 2 +- airflow/contrib/operators/winrm_operator.py | 2 +- airflow/contrib/utils/gcp_field_validator.py | 3 +- setup.py | 1 + tests/hooks/test_http_hook.py | 8 +-- tests/test_jobs.py | 66 +++++++++---------- 12 files changed, 55 insertions(+), 51 deletions(-) diff --git a/airflow/config_templates/airflow_local_settings.py b/airflow/config_templates/airflow_local_settings.py index 45a2f2923c058..b82755c939120 100644 --- a/airflow/config_templates/airflow_local_settings.py +++ b/airflow/config_templates/airflow_local_settings.py @@ -214,10 +214,10 @@ mkdirs(directory, 0o755) if REMOTE_LOGGING and REMOTE_BASE_LOG_FOLDER.startswith('s3://'): - DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['s3']) + DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['s3']) elif REMOTE_LOGGING and REMOTE_BASE_LOG_FOLDER.startswith('gs://'): - DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['gcs']) + DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['gcs']) elif REMOTE_LOGGING and REMOTE_BASE_LOG_FOLDER.startswith('wasb'): - DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['wasb']) + DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['wasb']) elif REMOTE_LOGGING and ELASTICSEARCH_HOST: - DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['elasticsearch']) + DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['elasticsearch']) diff --git a/airflow/contrib/hooks/gcp_dataflow_hook.py b/airflow/contrib/hooks/gcp_dataflow_hook.py index e444a62f87b1f..17f082efaa992 100644 --- a/airflow/contrib/hooks/gcp_dataflow_hook.py +++ b/airflow/contrib/hooks/gcp_dataflow_hook.py @@ -167,7 +167,7 @@ def wait_for_done(self): if 
self._proc.poll() is not None: # Mark process completion but allows its outputs to be consumed. process_ends = True - if self._proc.returncode is not 0: + if self._proc.returncode != 0: raise Exception("DataFlow failed with return code {}".format( self._proc.returncode)) return job_id diff --git a/airflow/contrib/hooks/imap_hook.py b/airflow/contrib/hooks/imap_hook.py index 1bc19fcdac380..79f08cd5649cb 100644 --- a/airflow/contrib/hooks/imap_hook.py +++ b/airflow/contrib/hooks/imap_hook.py @@ -141,11 +141,11 @@ def download_mail_attachments(self, self._create_files(mail_attachments, local_output_directory) def _handle_not_found_mode(self, not_found_mode): - if not_found_mode is 'raise': + if not_found_mode == 'raise': raise AirflowException('No mail attachments found!') - elif not_found_mode is 'warn': + elif not_found_mode == 'warn': self.log.warning('No mail attachments found!') - elif not_found_mode is 'ignore': + elif not_found_mode == 'ignore': pass # Do not notify if the attachment has not been found. else: self.log.error('Invalid "not_found_mode" %s', not_found_mode) diff --git a/airflow/contrib/kubernetes/kubernetes_request_factory/pod_request_factory.py b/airflow/contrib/kubernetes/kubernetes_request_factory/pod_request_factory.py index 059026b320240..cb1b6667c7e4f 100644 --- a/airflow/contrib/kubernetes/kubernetes_request_factory/pod_request_factory.py +++ b/airflow/contrib/kubernetes/kubernetes_request_factory/pod_request_factory.py @@ -16,6 +16,7 @@ # under the License. 
import yaml +from airflow.contrib.kubernetes.pod import Pod from airflow.contrib.kubernetes.kubernetes_request_factory.kubernetes_request_factory \ import KubernetesRequestFactory diff --git a/airflow/contrib/kubernetes/pod_launcher.py b/airflow/contrib/kubernetes/pod_launcher.py index 8c8d949107494..2704fd9d32715 100644 --- a/airflow/contrib/kubernetes/pod_launcher.py +++ b/airflow/contrib/kubernetes/pod_launcher.py @@ -20,6 +20,7 @@ from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.state import State from datetime import datetime as dt +from airflow.contrib.kubernetes.pod import Pod from airflow.contrib.kubernetes.kubernetes_request_factory import \ pod_request_factory as pod_factory from kubernetes import watch, client @@ -69,7 +70,7 @@ def delete_pod(self, pod): raise def run_pod(self, pod, startup_timeout=120, get_logs=True): - # type: (Pod) -> (State, result) + # type: (Pod, int, bool) -> (State, str) """ Launches the pod synchronously and waits for completion. 
Args: @@ -90,7 +91,7 @@ def run_pod(self, pod, startup_timeout=120, get_logs=True): return self._monitor_pod(pod, get_logs) def _monitor_pod(self, pod, get_logs): - # type: (Pod) -> (State, content) + # type: (Pod, bool) -> (State, str) if get_logs: logs = self._client.read_namespaced_pod_log( diff --git a/airflow/contrib/operators/mlengine_operator.py b/airflow/contrib/operators/mlengine_operator.py index 65015acb2150b..a43d369d44730 100644 --- a/airflow/contrib/operators/mlengine_operator.py +++ b/airflow/contrib/operators/mlengine_operator.py @@ -42,7 +42,7 @@ def _normalize_mlengine_job_id(job_id): # Add a prefix when a job_id starts with a digit or a template match = re.search(r'\d|\{{2}', job_id) - if match and match.start() is 0: + if match and match.start() == 0: job = 'z_{}'.format(job_id) else: job = job_id diff --git a/airflow/contrib/operators/ssh_operator.py b/airflow/contrib/operators/ssh_operator.py index 0556c0cd9a069..00462ad3d6889 100644 --- a/airflow/contrib/operators/ssh_operator.py +++ b/airflow/contrib/operators/ssh_operator.py @@ -147,7 +147,7 @@ def execute(self, context): stderr.close() exit_status = stdout.channel.recv_exit_status() - if exit_status is 0: + if exit_status == 0: # returning output if do_xcom_push is set if self.do_xcom_push: enable_pickling = configuration.conf.getboolean( diff --git a/airflow/contrib/operators/winrm_operator.py b/airflow/contrib/operators/winrm_operator.py index e8366b809da20..c3f96693f0e4d 100644 --- a/airflow/contrib/operators/winrm_operator.py +++ b/airflow/contrib/operators/winrm_operator.py @@ -126,7 +126,7 @@ def execute(self, context): except Exception as e: raise AirflowException("WinRM operator error: {0}".format(str(e))) - if return_code is 0: + if return_code == 0: # returning output if do_xcom_push is set if self.do_xcom_push: enable_pickling = configuration.conf.getboolean( diff --git a/airflow/contrib/utils/gcp_field_validator.py b/airflow/contrib/utils/gcp_field_validator.py index 
e8b59c855e27c..5ae1bdf9ce51e 100644 --- a/airflow/contrib/utils/gcp_field_validator.py +++ b/airflow/contrib/utils/gcp_field_validator.py @@ -134,6 +134,7 @@ import re +from typing import Callable from airflow import LoggingMixin, AirflowException COMPOSITE_FIELD_TYPES = ['union', 'dict', 'list'] @@ -209,7 +210,7 @@ def _get_field_name_with_parent(field_name, parent): @staticmethod def _sanity_checks(children_validation_specs, field_type, full_field_path, regexp, allow_empty, custom_validation, value): - # type: (dict, str, str, str, function, object) -> None + # type: (dict, str, str, str, Callable, object) -> None if value is None and field_type != 'union': raise GcpFieldValidationException( "The required body field '{}' is missing. Please add it.". diff --git a/setup.py b/setup.py index 195da0c798eef..623ce3fb054d7 100644 --- a/setup.py +++ b/setup.py @@ -248,6 +248,7 @@ def write_version(filename=os.path.join(*['airflow', 'rednose', 'requests_mock', 'flake8>=3.6.0', + 'typing', ] if not PY3: diff --git a/tests/hooks/test_http_hook.py b/tests/hooks/test_http_hook.py index d55afc4650a5c..ca325940dcb4e 100644 --- a/tests/hooks/test_http_hook.py +++ b/tests/hooks/test_http_hook.py @@ -140,13 +140,13 @@ def test_hook_contains_header_from_extra_field(self, m): @requests_mock.mock() def test_hook_uses_provided_header(self, m): - conn = self.get_hook.get_conn(headers={"bareer": "newT0k3n"}) - self.assertEqual(conn.headers.get('bareer'), "newT0k3n") + conn = self.get_hook.get_conn(headers={"bareer": "newT0k3n"}) + self.assertEqual(conn.headers.get('bareer'), "newT0k3n") @requests_mock.mock() def test_hook_has_no_header_from_extra(self, m): - conn = self.get_hook.get_conn() - self.assertIsNone(conn.headers.get('bareer')) + conn = self.get_hook.get_conn() + self.assertIsNone(conn.headers.get('bareer')) @requests_mock.mock() def test_hooks_header_from_extra_is_overridden(self, m): diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 39f6a247314bd..85550ee984faa 
100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -341,44 +341,44 @@ def test_backfill_rerun_failed_tasks(self): self.assertEqual(ti.state, State.SUCCESS) def test_backfill_rerun_upstream_failed_tasks(self): - dag = DAG( - dag_id='test_backfill_rerun_upstream_failed', - start_date=DEFAULT_DATE, - schedule_interval='@daily') + dag = DAG( + dag_id='test_backfill_rerun_upstream_failed', + start_date=DEFAULT_DATE, + schedule_interval='@daily') - with dag: - t1 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-1', - dag=dag) - t2 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-2', - dag=dag) - t1.set_upstream(t2) + with dag: + t1 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-1', + dag=dag) + t2 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-2', + dag=dag) + t1.set_upstream(t2) - dag.clear() - executor = TestExecutor(do_update=True) + dag.clear() + executor = TestExecutor(do_update=True) - job = BackfillJob(dag=dag, - executor=executor, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - ) - job.run() + job = BackfillJob(dag=dag, + executor=executor, + start_date=DEFAULT_DATE, + end_date=DEFAULT_DATE + datetime.timedelta(days=2), + ) + job.run() - ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'), - execution_date=DEFAULT_DATE) - ti.refresh_from_db() - ti.set_state(State.UPSTREAM_FAILED) + ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'), + execution_date=DEFAULT_DATE) + ti.refresh_from_db() + ti.set_state(State.UPSTREAM_FAILED) - job = BackfillJob(dag=dag, - executor=executor, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - rerun_failed_tasks=True - ) - job.run() - ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'), - execution_date=DEFAULT_DATE) - ti.refresh_from_db() - self.assertEqual(ti.state, State.SUCCESS) + job = BackfillJob(dag=dag, + 
executor=executor, + start_date=DEFAULT_DATE, + end_date=DEFAULT_DATE + datetime.timedelta(days=2), + rerun_failed_tasks=True + ) + job.run() + ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'), + execution_date=DEFAULT_DATE) + ti.refresh_from_db() + self.assertEqual(ti.state, State.SUCCESS) def test_backfill_rerun_failed_tasks_without_flag(self): dag = DAG( From 82c60a2040a3f306e5f880340c3f646d4f629884 Mon Sep 17 00:00:00 2001 From: "Drew J. Sonne" Date: Wed, 30 Jan 2019 19:10:25 +0000 Subject: [PATCH 0033/1104] [AIRFLOW-3774] Register blueprints with app (#4598) --- airflow/plugins_manager.py | 5 ++++- airflow/www/app.py | 11 ++++++++++- tests/plugins/test_plugins_manager.py | 7 +++++++ 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py index d0efe62da9e25..811e1b4c58122 100644 --- a/airflow/plugins_manager.py +++ b/airflow/plugins_manager.py @@ -194,7 +194,10 @@ def make_module(name, objects): macros_modules.append(make_module('airflow.macros.' 
+ p.name, p.macros)) admin_views.extend(p.admin_views) - flask_blueprints.extend(p.flask_blueprints) menu_links.extend(p.menu_links) flask_appbuilder_views.extend(p.appbuilder_views) flask_appbuilder_menu_links.extend(p.appbuilder_menu_items) + flask_blueprints.extend([{ + 'name': p.name, + 'blueprint': bp + } for bp in p.flask_blueprints]) diff --git a/airflow/www/app.py b/airflow/www/app.py index 2251dcedecf38..aa54412eec576 100644 --- a/airflow/www/app.py +++ b/airflow/www/app.py @@ -141,7 +141,8 @@ def init_views(appbuilder): def integrate_plugins(): """Integrate plugins to the context""" from airflow.plugins_manager import ( - flask_appbuilder_views, flask_appbuilder_menu_links) + flask_appbuilder_views, flask_appbuilder_menu_links + ) for v in flask_appbuilder_views: log.debug("Adding view %s", v["name"]) @@ -161,7 +162,15 @@ def integrate_plugins(): # will add the new Views and Menus names to the backend, but will not # delete the old ones. + def init_plugin_blueprints(app): + from airflow.plugins_manager import flask_blueprints + + for bp in flask_blueprints: + log.debug("Adding blueprint %s:%s", bp["name"], bp["blueprint"].import_name) + app.register_blueprint(bp["blueprint"]) + init_views(appbuilder) + init_plugin_blueprints(app) security_manager = appbuilder.sm security_manager.sync_roles() diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py index e5a1ecce3431d..d15cd5e3158a8 100644 --- a/tests/plugins/test_plugins_manager.py +++ b/tests/plugins/test_plugins_manager.py @@ -66,3 +66,10 @@ def test_flaskappbuilder_menu_links(self): link = links[0] self.assertEqual(link.name, appbuilder_mitem['category']) self.assertEqual(link.childs[0].name, appbuilder_mitem['name']) + + def test_app_blueprints(self): + from tests.plugins.test_plugin import bp + + # Blueprint should be present in the app + self.assertTrue('test_plugin' in self.app.blueprints) + self.assertEqual(self.app.blueprints['test_plugin'].name, bp.name) From 
7ebecd677d427e90e292cb7446d96227bd0d6ae1 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Wed, 30 Jan 2019 19:19:41 +0000 Subject: [PATCH 0034/1104] [AIRFLOW-3779] Don't install enum34 backport when not needed (#4620) https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies Installing this in more recent versions causes a "AttributeError: module 'enum' has no attribute 'IntFlag'`" in re.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 623ce3fb054d7..afee79651f1f4 100644 --- a/setup.py +++ b/setup.py @@ -291,7 +291,7 @@ def do_setup(): 'configparser>=3.5.0, <3.6.0', 'croniter>=0.3.17, <0.4', 'dill>=0.2.2, <0.3', - 'enum34~=1.1.6', + 'enum34~=1.1.6;python_version<"3.4"', 'flask>=0.12.4, <0.13', 'flask-appbuilder==1.12.1', 'flask-admin==1.5.2', From cd4c61a2de8726e7473fac1758875b0c263ae3eb Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Wed, 30 Jan 2019 14:24:19 -0800 Subject: [PATCH 0035/1104] [AIRFLOW-3787] Import/export users from JSON file (#4624) * [AIRFLOW-3787] Import/export users from JSON file Provide a CLI command to import or export users from a JSON file. The CLI arguments are modeled after the import/export commands for Variables and Pools. Example Usage: airflow users -i users.json airflow users -e /tmp/exported-users.json The import command will create any users that do not yet exist and update any users that already exist. It never deletes users. The format of the file produced by an export is compatible with the import command, i.e., `import(export())` should succeed but have no side-effects. 
* Add input file format to help text --- airflow/bin/cli.py | 124 ++++++++++++++++++++++++++++++++++++++++++++- tests/core.py | 121 +++++++++++++++++++++++++++++++++++++++---- 2 files changed, 234 insertions(+), 11 deletions(-) diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index df98e4d83510e..b2aa9191bcd44 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -31,6 +31,7 @@ import getpass import reprlib import argparse +from argparse import RawTextHelpFormatter from builtins import input from collections import namedtuple @@ -1474,6 +1475,107 @@ def users(args): print('User "{}" added to role "{}".'.format( user, args.role)) + elif args.export: + appbuilder = cached_appbuilder() + users = appbuilder.sm.get_all_users() + fields = ['id', 'username', 'email', 'first_name', 'last_name', 'roles'] + + # In the User model the first and last name fields have underscores, + # but the corresponding parameters in the CLI don't + def remove_underscores(s): + return re.sub("_", "", s) + + users = [ + {remove_underscores(field): user.__getattribute__(field) + if field != 'roles' else [r.name for r in user.roles] + for field in fields} + for user in users + ] + + with open(args.export, 'w') as f: + f.write(json.dumps(users, sort_keys=True, indent=4)) + print("{} users successfully exported to {}".format(len(users), f.name)) + + elif getattr(args, 'import'): # "import" is a reserved word + json_file = getattr(args, 'import') + if not os.path.exists(json_file): + print("File '{}' does not exist") + exit(1) + + users_list = None + try: + with open(json_file, 'r') as f: + users_list = json.loads(f.read()) + except ValueError as e: + print("File '{}' is not valid JSON. 
Error: {}".format(json_file, e)) + exit(1) + + users_created, users_updated = _import_users(users_list) + if users_created: + print("Created the following users:\n\t{}".format( + "\n\t".join(users_created))) + + if users_updated: + print("Updated the following users:\n\t{}".format( + "\n\t".join(users_updated))) + + +def _import_users(users_list): + appbuilder = cached_appbuilder() + users_created = [] + users_updated = [] + + for user in users_list: + roles = [] + for rolename in user['roles']: + role = appbuilder.sm.find_role(rolename) + if not role: + print("Error: '{}' is not a valid role".format(rolename)) + exit(1) + else: + roles.append(role) + + required_fields = ['username', 'firstname', 'lastname', + 'email', 'roles'] + for field in required_fields: + if not user.get(field): + print("Error: '{}' is a required field, but was not " + "specified".format(field)) + exit(1) + + existing_user = appbuilder.sm.find_user(email=user['email']) + if existing_user: + print("Found existing user with email '{}'".format(user['email'])) + existing_user.roles = roles + existing_user.first_name = user['firstname'] + existing_user.last_name = user['lastname'] + + if existing_user.username != user['username']: + print("Error: Changing ther username is not allowed - " + "please delete and recreate the user with " + "email '{}'".format(user['email'])) + exit(1) + + appbuilder.sm.update_user(existing_user) + users_updated.append(user['email']) + else: + print("Creating new user with email '{}'".format(user['email'])) + appbuilder.sm.add_user( + username=user['username'], + first_name=user['firstname'], + last_name=user['lastname'], + email=user['email'], + role=roles[0], # add_user() requires exactly 1 role + ) + + if len(roles) > 1: + new_user = appbuilder.sm.find_user(email=user['email']) + new_user.roles = roles + appbuilder.sm.update_user(new_user) + + users_created.append(user['email']) + + return users_created, users_updated @cli_utils.action_logging @@ -1998,6 +2100,25 @@ 
class CLIFactory(object): ('--remove-role',), help='Remove user from a role', action='store_true'), + 'user_import': Arg( + ("-i", "--import"), + metavar="FILEPATH", + help="Import users from JSON file. Example format:" + + textwrap.dedent(''' + [ + { + "email": "foo@bar.org", + "firstname": "Jon", + "lastname": "Doe", + "roles": ["Public"], + "username": "jondoe" + } + ]'''), + ), + 'user_export': Arg( + ("-e", "--export"), + metavar="FILEPATH", + help="Export users to JSON file"), 'autoscale': Arg( ('-a', '--autoscale'), help="Minimum and Maximum number of worker to autoscale"), @@ -2166,7 +2287,7 @@ class CLIFactory(object): 'func': users, 'help': "List/Create/Delete/Update users", 'args': ('list_users', 'create_user', 'delete_user', - 'add_role', 'remove_role', + 'add_role', 'remove_role', 'user_import', 'user_export', 'username', 'email', 'firstname', 'lastname', 'role', 'password', 'use_random_password'), }, @@ -2203,6 +2324,7 @@ def get_parser(cls, dag_parser=False): for sub in subparser_list: sub = cls.subparsers_dict[sub] sp = subparsers.add_parser(sub['func'].__name__, help=sub['help']) + sp.formatter_class = RawTextHelpFormatter for arg in sub['args']: if 'dag_id' in arg and dag_parser: continue diff --git a/tests/core.py b/tests/core.py index ca70df92c68d5..0db725d7770a6 100644 --- a/tests/core.py +++ b/tests/core.py @@ -40,6 +40,7 @@ from email.mime.text import MIMEText from numpy.testing import assert_array_almost_equal from six.moves.urllib.parse import urlencode +from tempfile import NamedTemporaryFile from time import sleep from airflow import configuration @@ -1061,7 +1062,8 @@ def test_externally_triggered_dagrun(self): class CliTests(unittest.TestCase): - TEST_USER_EMAIL = 'test-user@example.com' + TEST_USER1_EMAIL = 'test-user1@example.com' + TEST_USER2_EMAIL = 'test-user2@example.com' @classmethod def setUpClass(cls): @@ -1082,9 +1084,10 @@ def setUp(self): def tearDown(self): self._cleanup(session=self.session) - test_user = 
self.appbuilder.sm.find_user(email=CliTests.TEST_USER_EMAIL) - if test_user: - self.appbuilder.sm.del_register_user(test_user) + for email in [self.TEST_USER1_EMAIL, self.TEST_USER2_EMAIL]: + test_user = self.appbuilder.sm.find_user(email=email) + if test_user: + self.appbuilder.sm.del_register_user(test_user) super(CliTests, self).tearDown() @staticmethod @@ -1153,6 +1156,104 @@ def test_cli_list_users(self): for i in range(0, 3): self.assertIn('user{}'.format(i), stdout) + def test_cli_import_users(self): + def assertUserInRoles(email, roles): + for role in roles: + self.assertTrue(self._does_user_belong_to_role(email, role)) + + def assertUserNotInRoles(email, roles): + for role in roles: + self.assertFalse(self._does_user_belong_to_role(email, role)) + + assertUserNotInRoles(self.TEST_USER1_EMAIL, ['Admin', 'Op']) + assertUserNotInRoles(self.TEST_USER2_EMAIL, ['Public']) + users = [ + { + "username": "imported_user1", "lastname": "doe1", + "firstname": "jon", "email": self.TEST_USER1_EMAIL, + "roles": ["Admin", "Op"] + }, + { + "username": "imported_user2", "lastname": "doe2", + "firstname": "jon", "email": self.TEST_USER2_EMAIL, + "roles": ["Public"] + } + ] + self._import_users_from_file(users) + + assertUserInRoles(self.TEST_USER1_EMAIL, ['Admin', 'Op']) + assertUserInRoles(self.TEST_USER2_EMAIL, ['Public']) + + users = [ + { + "username": "imported_user1", "lastname": "doe1", + "firstname": "jon", "email": self.TEST_USER1_EMAIL, + "roles": ["Public"] + }, + { + "username": "imported_user2", "lastname": "doe2", + "firstname": "jon", "email": self.TEST_USER2_EMAIL, + "roles": ["Admin"] + } + ] + self._import_users_from_file(users) + + assertUserNotInRoles(self.TEST_USER1_EMAIL, ['Admin', 'Op']) + assertUserInRoles(self.TEST_USER1_EMAIL, ['Public']) + assertUserNotInRoles(self.TEST_USER2_EMAIL, ['Public']) + assertUserInRoles(self.TEST_USER2_EMAIL, ['Admin']) + + def test_cli_export_users(self): + user1 = {"username": "imported_user1", "lastname": "doe1", + 
"firstname": "jon", "email": self.TEST_USER1_EMAIL, + "roles": ["Public"]} + user2 = {"username": "imported_user2", "lastname": "doe2", + "firstname": "jon", "email": self.TEST_USER2_EMAIL, + "roles": ["Admin"]} + self._import_users_from_file([user1, user2]) + + users_filename = self._export_users_to_file() + with open(users_filename, mode='r') as f: + retrieved_users = json.loads(f.read()) + os.remove(users_filename) + + # ensure that an export can be imported + self._import_users_from_file(retrieved_users) + + def find_by_username(username): + matches = [u for u in retrieved_users + if u['username'] == username] + if not matches: + self.fail("Couldn't find user with username {}".format(username)) + else: + matches[0].pop('id') # this key not required for import + return matches[0] + + self.assertEqual(find_by_username('imported_user1'), user1) + self.assertEqual(find_by_username('imported_user2'), user2) + + def _import_users_from_file(self, user_list): + json_file_content = json.dumps(user_list) + f = NamedTemporaryFile(delete=False) + try: + f.write(json_file_content.encode()) + f.flush() + + args = self.parser.parse_args([ + 'users', '-i', f.name + ]) + cli.users(args) + finally: + os.remove(f.name) + + def _export_users_to_file(self): + f = NamedTemporaryFile(delete=False) + args = self.parser.parse_args([ + 'users', '-e', f.name + ]) + cli.users(args) + return f.name + def _does_user_belong_to_role(self, email, rolename): user = self.appbuilder.sm.find_user(email=email) role = self.appbuilder.sm.find_role(rolename) @@ -1165,12 +1266,12 @@ def test_cli_add_user_role(self): args = self.parser.parse_args([ 'users', '-c', '--username', 'test4', '--lastname', 'doe', '--firstname', 'jon', - '--email', self.TEST_USER_EMAIL, '--role', 'Viewer', '--use_random_password' + '--email', self.TEST_USER1_EMAIL, '--role', 'Viewer', '--use_random_password' ]) cli.users(args) self.assertFalse( - self._does_user_belong_to_role(email=self.TEST_USER_EMAIL, + 
self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL, rolename='Op'), "User should not yet be a member of role 'Op'" ) @@ -1181,7 +1282,7 @@ def test_cli_add_user_role(self): cli.users(args) self.assertTrue( - self._does_user_belong_to_role(email=self.TEST_USER_EMAIL, + self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL, rolename='Op'), "User should have been added to role 'Op'" ) @@ -1190,12 +1291,12 @@ def test_cli_remove_user_role(self): args = self.parser.parse_args([ 'users', '-c', '--username', 'test4', '--lastname', 'doe', '--firstname', 'jon', - '--email', self.TEST_USER_EMAIL, '--role', 'Viewer', '--use_random_password' + '--email', self.TEST_USER1_EMAIL, '--role', 'Viewer', '--use_random_password' ]) cli.users(args) self.assertTrue( - self._does_user_belong_to_role(email=self.TEST_USER_EMAIL, + self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL, rolename='Viewer'), "User should have been created with role 'Viewer'" ) @@ -1206,7 +1307,7 @@ def test_cli_remove_user_role(self): cli.users(args) self.assertFalse( - self._does_user_belong_to_role(email=self.TEST_USER_EMAIL, + self._does_user_belong_to_role(email=self.TEST_USER1_EMAIL, rolename='Viewer'), "User should have been removed from role 'Viewer'" ) From 8e6bca15464afaa50994c800034c9efd48e1430a Mon Sep 17 00:00:00 2001 From: Stefan Seelmann Date: Wed, 30 Jan 2019 23:31:37 +0100 Subject: [PATCH 0036/1104] [AIRFLOW-3462] Move TaskReschedule out of models.py (#4618) --- airflow/jobs.py | 2 +- airflow/models/__init__.py | 69 +-------------- airflow/models/base.py | 2 + airflow/models/connection.py | 3 +- airflow/models/slamiss.py | 2 +- airflow/models/taskreschedule.py | 84 +++++++++++++++++++ airflow/sensors/base_sensor_operator.py | 3 +- airflow/ti_deps/deps/ready_to_reschedule.py | 3 +- tests/models.py | 6 +- tests/sensors/test_base_sensor.py | 3 +- .../deps/test_ready_to_reschedule_dep.py | 13 +-- 11 files changed, 109 insertions(+), 81 deletions(-) create mode 100755 
airflow/models/taskreschedule.py diff --git a/airflow/jobs.py b/airflow/jobs.py index 5949950d00cf9..552e12c21f106 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -64,7 +64,7 @@ from airflow.utils.state import State Base = models.base.Base -ID_LEN = models.ID_LEN +ID_LEN = models.base.ID_LEN class BaseJob(Base, LoggingMixin): diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 336b06f761340..83c38adbdbb98 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -28,7 +28,7 @@ from builtins import ImportError as BuiltinImportError, bytes, object, str from future.standard_library import install_aliases -from airflow.models.base import Base +from airflow.models.base import Base, ID_LEN try: # Fix Python > 3.7 deprecation @@ -63,9 +63,8 @@ from urllib.parse import quote from sqlalchemy import ( - Boolean, Column, DateTime, Float, ForeignKeyConstraint, Index, - Integer, LargeBinary, PickleType, String, Text, UniqueConstraint, and_, asc, - func, or_, true as sqltrue + Boolean, Column, DateTime, Float, Index, Integer, LargeBinary, PickleType, String, + Text, UniqueConstraint, and_, func, or_, true as sqltrue ) from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import reconstructor, synonym @@ -85,6 +84,7 @@ from airflow.dag.base_dag import BaseDag, BaseDagBag from airflow.lineage import apply_lineage, prepare_lineage from airflow.models.dagpickle import DagPickle +from airflow.models.taskreschedule import TaskReschedule from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep @@ -109,7 +109,6 @@ install_aliases() -ID_LEN = 250 XCOM_RETURN_KEY = 'return_value' Stats = settings.Stats @@ -1888,66 +1887,6 @@ def __init__(self, task, execution_date, start_date, end_date): self.duration = None -class TaskReschedule(Base): - """ - TaskReschedule tracks rescheduled 
task instances. - """ - - __tablename__ = "task_reschedule" - - id = Column(Integer, primary_key=True) - task_id = Column(String(ID_LEN), nullable=False) - dag_id = Column(String(ID_LEN), nullable=False) - execution_date = Column(UtcDateTime, nullable=False) - try_number = Column(Integer, nullable=False) - start_date = Column(UtcDateTime, nullable=False) - end_date = Column(UtcDateTime, nullable=False) - duration = Column(Integer, nullable=False) - reschedule_date = Column(UtcDateTime, nullable=False) - - __table_args__ = ( - Index('idx_task_reschedule_dag_task_date', dag_id, task_id, execution_date, - unique=False), - ForeignKeyConstraint([task_id, dag_id, execution_date], - [TaskInstance.task_id, TaskInstance.dag_id, - TaskInstance.execution_date], - name='task_reschedule_dag_task_date_fkey') - ) - - def __init__(self, task, execution_date, try_number, start_date, end_date, - reschedule_date): - self.dag_id = task.dag_id - self.task_id = task.task_id - self.execution_date = execution_date - self.try_number = try_number - self.start_date = start_date - self.end_date = end_date - self.reschedule_date = reschedule_date - self.duration = (self.end_date - self.start_date).total_seconds() - - @staticmethod - @provide_session - def find_for_task_instance(task_instance, session): - """ - Returns all task reschedules for the task instance and try number, - in ascending order. 
- - :param task_instance: the task instance to find task reschedules for - :type task_instance: TaskInstance - """ - TR = TaskReschedule - return ( - session - .query(TR) - .filter(TR.dag_id == task_instance.dag_id, - TR.task_id == task_instance.task_id, - TR.execution_date == task_instance.execution_date, - TR.try_number == task_instance.try_number) - .order_by(asc(TR.id)) - .all() - ) - - class Log(Base): """ Used to actively log events to the database diff --git a/airflow/models/base.py b/airflow/models/base.py index 1a06b0b6259f7..3f5eb80154b73 100644 --- a/airflow/models/base.py +++ b/airflow/models/base.py @@ -28,3 +28,5 @@ Base = declarative_base() else: Base = declarative_base(metadata=MetaData(schema=SQL_ALCHEMY_SCHEMA)) + +ID_LEN = 250 diff --git a/airflow/models/connection.py b/airflow/models/connection.py index a6f854e18cc29..913ee5db612b5 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -26,7 +26,8 @@ from sqlalchemy.orm import synonym from airflow import LoggingMixin, AirflowException -from airflow.models import Base, ID_LEN, get_fernet +from airflow.models import get_fernet +from airflow.models.base import Base, ID_LEN class Connection(Base, LoggingMixin): diff --git a/airflow/models/slamiss.py b/airflow/models/slamiss.py index fd1e13ad310ca..0981be886ea01 100644 --- a/airflow/models/slamiss.py +++ b/airflow/models/slamiss.py @@ -19,7 +19,7 @@ from sqlalchemy import Boolean, Column, String, Index, Text -from airflow.models import Base, ID_LEN +from airflow.models.base import Base, ID_LEN from airflow.utils.sqlalchemy import UtcDateTime diff --git a/airflow/models/taskreschedule.py b/airflow/models/taskreschedule.py new file mode 100755 index 0000000000000..0beedee01225f --- /dev/null +++ b/airflow/models/taskreschedule.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from sqlalchemy import Column, ForeignKeyConstraint, Index, Integer, String, asc + +from airflow.models.base import Base, ID_LEN +from airflow.utils.db import provide_session +from airflow.utils.sqlalchemy import UtcDateTime + + +class TaskReschedule(Base): + """ + TaskReschedule tracks rescheduled task instances. 
+ """ + + __tablename__ = "task_reschedule" + + id = Column(Integer, primary_key=True) + task_id = Column(String(ID_LEN), nullable=False) + dag_id = Column(String(ID_LEN), nullable=False) + execution_date = Column(UtcDateTime, nullable=False) + try_number = Column(Integer, nullable=False) + start_date = Column(UtcDateTime, nullable=False) + end_date = Column(UtcDateTime, nullable=False) + duration = Column(Integer, nullable=False) + reschedule_date = Column(UtcDateTime, nullable=False) + + __table_args__ = ( + Index('idx_task_reschedule_dag_task_date', dag_id, task_id, execution_date, + unique=False), + ForeignKeyConstraint([task_id, dag_id, execution_date], + ['task_instance.task_id', 'task_instance.dag_id', + 'task_instance.execution_date'], + name='task_reschedule_dag_task_date_fkey') + ) + + def __init__(self, task, execution_date, try_number, start_date, end_date, + reschedule_date): + self.dag_id = task.dag_id + self.task_id = task.task_id + self.execution_date = execution_date + self.try_number = try_number + self.start_date = start_date + self.end_date = end_date + self.reschedule_date = reschedule_date + self.duration = (self.end_date - self.start_date).total_seconds() + + @staticmethod + @provide_session + def find_for_task_instance(task_instance, session): + """ + Returns all task reschedules for the task instance and try number, + in ascending order. 
+ + :param task_instance: the task instance to find task reschedules for + :type task_instance: TaskInstance + """ + TR = TaskReschedule + return ( + session + .query(TR) + .filter(TR.dag_id == task_instance.dag_id, + TR.task_id == task_instance.task_id, + TR.execution_date == task_instance.execution_date, + TR.try_number == task_instance.try_number) + .order_by(asc(TR.id)) + .all() + ) diff --git a/airflow/sensors/base_sensor_operator.py b/airflow/sensors/base_sensor_operator.py index 4fbe0b6f8517e..8f12e23855fee 100644 --- a/airflow/sensors/base_sensor_operator.py +++ b/airflow/sensors/base_sensor_operator.py @@ -23,7 +23,8 @@ from airflow.exceptions import AirflowException, AirflowSensorTimeout, \ AirflowSkipException, AirflowRescheduleException -from airflow.models import BaseOperator, SkipMixin, TaskReschedule +from airflow.models import BaseOperator, SkipMixin +from airflow.models.taskreschedule import TaskReschedule from airflow.utils import timezone from airflow.utils.decorators import apply_defaults from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep diff --git a/airflow/ti_deps/deps/ready_to_reschedule.py b/airflow/ti_deps/deps/ready_to_reschedule.py index dc1c92c654f43..348cf00f5feb6 100644 --- a/airflow/ti_deps/deps/ready_to_reschedule.py +++ b/airflow/ti_deps/deps/ready_to_reschedule.py @@ -17,6 +17,7 @@ # specific language governing permissions and limitations # under the License. 
+from airflow.models.taskreschedule import TaskReschedule from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.utils import timezone from airflow.utils.db import provide_session @@ -49,8 +50,6 @@ def _get_dep_statuses(self, ti, session, dep_context): reason="The task instance is not in State_UP_FOR_RESCHEDULE or NONE state.") return - # Lazy import to avoid circular dependency - from airflow.models import TaskReschedule task_reschedules = TaskReschedule.find_for_task_instance(task_instance=ti) if not task_reschedules: yield self._passing_status( diff --git a/tests/models.py b/tests/models.py index b3d41275ca874..8cd43b450e778 100644 --- a/tests/models.py +++ b/tests/models.py @@ -50,11 +50,11 @@ from airflow.models import KubeResourceVersion, KubeWorkerIdentifier from airflow.models import SkipMixin from airflow.models import State as ST -from airflow.models import TaskReschedule as TR from airflow.models import XCom from airflow.models import Variable from airflow.models import clear_task_instances from airflow.models.connection import Connection +from airflow.models.taskreschedule import TaskReschedule from airflow.operators.bash_operator import BashOperator from airflow.operators.dummy_operator import DummyOperator from airflow.operators.python_operator import PythonOperator @@ -1946,7 +1946,7 @@ class TaskInstanceTest(unittest.TestCase): def tearDown(self): with create_session() as session: session.query(models.TaskFail).delete() - session.query(models.TaskReschedule).delete() + session.query(TaskReschedule).delete() session.query(models.TaskInstance).delete() def test_set_task_dates(self): @@ -2367,7 +2367,7 @@ def run_ti_and_assert(run_date, expected_start_date, expected_end_date, expected self.assertEqual(ti.start_date, expected_start_date) self.assertEqual(ti.end_date, expected_end_date) self.assertEqual(ti.duration, expected_duration) - trs = TR.find_for_task_instance(ti) + trs = TaskReschedule.find_for_task_instance(ti) 
self.assertEqual(len(trs), expected_task_reschedule_count) date1 = timezone.utcnow() diff --git a/tests/sensors/test_base_sensor.py b/tests/sensors/test_base_sensor.py index 451c95e5d11a4..7daf37dd4598c 100644 --- a/tests/sensors/test_base_sensor.py +++ b/tests/sensors/test_base_sensor.py @@ -23,7 +23,8 @@ from airflow import DAG, configuration, settings from airflow.exceptions import (AirflowSensorTimeout, AirflowException, AirflowRescheduleException) -from airflow.models import DagRun, TaskInstance, TaskReschedule +from airflow.models import DagRun, TaskInstance +from airflow.models.taskreschedule import TaskReschedule from airflow.operators.dummy_operator import DummyOperator from airflow.sensors.base_sensor_operator import BaseSensorOperator from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep diff --git a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py index 9bae3498984f1..4bfcd6f12e3de 100644 --- a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py +++ b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py @@ -21,7 +21,8 @@ from datetime import timedelta from mock import Mock, patch -from airflow.models import TaskInstance, DAG, TaskReschedule +from airflow.models import TaskInstance, DAG +from airflow.models.taskreschedule import TaskReschedule from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep from airflow.utils.state import State @@ -52,12 +53,12 @@ def test_should_pass_if_not_in_none_state(self): ti = self._get_task_instance(State.UP_FOR_RETRY) self.assertTrue(ReadyToRescheduleDep().is_met(ti=ti)) - @patch('airflow.models.TaskReschedule.find_for_task_instance', return_value=[]) + @patch('airflow.models.taskreschedule.TaskReschedule.find_for_task_instance', return_value=[]) def test_should_pass_if_no_reschedule_record_exists(self, find_for_task_instance): ti = self._get_task_instance(State.NONE) 
self.assertTrue(ReadyToRescheduleDep().is_met(ti=ti)) - @patch('airflow.models.TaskReschedule.find_for_task_instance') + @patch('airflow.models.taskreschedule.TaskReschedule.find_for_task_instance') def test_should_pass_after_reschedule_date_one(self, find_for_task_instance): find_for_task_instance.return_value = [ self._get_task_reschedule(utcnow() - timedelta(minutes=1)), @@ -65,7 +66,7 @@ def test_should_pass_after_reschedule_date_one(self, find_for_task_instance): ti = self._get_task_instance(State.UP_FOR_RESCHEDULE) self.assertTrue(ReadyToRescheduleDep().is_met(ti=ti)) - @patch('airflow.models.TaskReschedule.find_for_task_instance') + @patch('airflow.models.taskreschedule.TaskReschedule.find_for_task_instance') def test_should_pass_after_reschedule_date_multiple(self, find_for_task_instance): find_for_task_instance.return_value = [ self._get_task_reschedule(utcnow() - timedelta(minutes=21)), @@ -75,7 +76,7 @@ def test_should_pass_after_reschedule_date_multiple(self, find_for_task_instance ti = self._get_task_instance(State.UP_FOR_RESCHEDULE) self.assertTrue(ReadyToRescheduleDep().is_met(ti=ti)) - @patch('airflow.models.TaskReschedule.find_for_task_instance') + @patch('airflow.models.taskreschedule.TaskReschedule.find_for_task_instance') def test_should_fail_before_reschedule_date_one(self, find_for_task_instance): find_for_task_instance.return_value = [ self._get_task_reschedule(utcnow() + timedelta(minutes=1)), @@ -83,7 +84,7 @@ def test_should_fail_before_reschedule_date_one(self, find_for_task_instance): ti = self._get_task_instance(State.UP_FOR_RESCHEDULE) self.assertFalse(ReadyToRescheduleDep().is_met(ti=ti)) - @patch('airflow.models.TaskReschedule.find_for_task_instance') + @patch('airflow.models.taskreschedule.TaskReschedule.find_for_task_instance') def test_should_fail_before_reschedule_date_multiple(self, find_for_task_instance): find_for_task_instance.return_value = [ self._get_task_reschedule(utcnow() - timedelta(minutes=19)), From 
0fef65a10f0e1b25a572cac0c390451d0fbe2884 Mon Sep 17 00:00:00 2001 From: Tao Feng Date: Wed, 30 Jan 2019 22:50:13 -0800 Subject: [PATCH 0037/1104] [AIRFLOW-XXX] Add a doc about fab security (#4595) --- docs/security.rst | 149 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 149 insertions(+) diff --git a/docs/security.rst b/docs/security.rst index 291085c4aae8e..d23e84e43feef 100644 --- a/docs/security.rst +++ b/docs/security.rst @@ -424,3 +424,152 @@ command, or as a configuration item in your ``airflow.cfg``. For both cases, ple [celery] flower_basic_auth = user1:password1,user2:password2 + + +RBAC UI Security +---------------- + +Security of Airflow Webserver UI is handled by Flask AppBuilder (FAB). +Please read its related `security document `_ +regarding its security model. + +Default Roles +''''''''''''' +Airflow ships with a set of roles by default: Admin, User, Op, Viewer, and Public. +Only ``Admin`` users could configure/alter the permissions for other roles. But it is not recommended +that ``Admin`` users alter these default roles in any way by removing +or adding permissions to these roles. + +Admin +^^^^^ +``Admin`` users have all possible permissions, including granting or revoking permissions from +other users. + +Public +^^^^^^ +``Public`` users (anonymous) don't have any permissions. + +Viewer +^^^^^^ +``Viewer`` users have limited viewer permissions + +.. code:: python + + VIEWER_PERMS = { + 'menu_access', + 'can_index', + 'can_list', + 'can_show', + 'can_chart', + 'can_dag_stats', + 'can_dag_details', + 'can_task_stats', + 'can_code', + 'can_log', + 'can_get_logs_with_metadata', + 'can_tries', + 'can_graph', + 'can_tree', + 'can_task', + 'can_task_instances', + 'can_xcom', + 'can_gantt', + 'can_landing_times', + 'can_duration', + 'can_blocked', + 'can_rendered', + 'can_pickle_info', + 'can_version', + } + +on limited web views + +.. 
code:: python + + VIEWER_VMS = { + 'Airflow', + 'DagModelView', + 'Browse', + 'DAG Runs', + 'DagRunModelView', + 'Task Instances', + 'TaskInstanceModelView', + 'SLA Misses', + 'SlaMissModelView', + 'Jobs', + 'JobModelView', + 'Logs', + 'LogModelView', + 'Docs', + 'Documentation', + 'Github', + 'About', + 'Version', + 'VersionView', + } + +User +^^^^ +``User`` users have ``Viewer`` permissions plus additional user permissions + +.. code:: python + + USER_PERMS = { + 'can_dagrun_clear', + 'can_run', + 'can_trigger', + 'can_add', + 'can_edit', + 'can_delete', + 'can_paused', + 'can_refresh', + 'can_success', + 'muldelete', + 'set_failed', + 'set_running', + 'set_success', + 'clear', + 'can_clear', + } + + +on User web views which is the same as Viewer web views. + +Op +^^ +``Op`` users have ``User`` permissions plus additional op permissions + +.. code:: python + + OP_PERMS = { + 'can_conf', + 'can_varimport', + } + +on ``User`` web views plus these additional op web views + +.. code:: python + + OP_VMS = { + 'Admin', + 'Configurations', + 'ConfigurationView', + 'Connections', + 'ConnectionModelView', + 'Pools', + 'PoolModelView', + 'Variables', + 'VariableModelView', + 'XComs', + 'XComModelView', + } + +Custom Roles +''''''''''''' + +DAG Level Role +^^^^^^^^^^^^^^ +``Admin`` can create a set of roles which are only allowed to view a certain set of dags. This is called DAG level access. Each dag defined in the dag model table +is treated as a ``View`` which has two permissions associated with it (``can_dag_read`` and ``can_dag_edit``). There is a special view called ``all_dags`` which +allows the role to access all the dags. The default ``Admin``, ``Viewer``, ``User``, ``Op`` roles can all access ``all_dags`` view. 
+ From 05d43516fb15a7bd62f424483bbfb7422849a5b7 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Thu, 31 Jan 2019 07:59:49 +0100 Subject: [PATCH 0038/1104] [AIRFLOW-2876] Update Tenacity to 4.12 (#3723) Tenacity 4.8 is not python 3.7 compatible because it contains reserved keywords in the code --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index afee79651f1f4..1cbe29b64ad05 100644 --- a/setup.py +++ b/setup.py @@ -318,7 +318,7 @@ def do_setup(): 'setproctitle>=1.1.8, <2', 'sqlalchemy>=1.1.15, <1.3.0', 'tabulate>=0.7.5, <=0.8.2', - 'tenacity==4.8.0', + 'tenacity==4.12.0', 'text-unidecode==1.2', # Avoid GPL dependency, pip uses reverse order(!) 'thrift>=0.9.2', 'tzlocal>=1.4', From e1c0433127d0a19235b12a966af2dbd8016c53c8 Mon Sep 17 00:00:00 2001 From: andyh1203 Date: Wed, 30 Jan 2019 23:37:35 -0800 Subject: [PATCH 0039/1104] [AIRFLOW-3471] Move XCom out of models.py (#4629) --- airflow/models/__init__.py | 199 +------------------------------- airflow/models/xcom.py | 225 +++++++++++++++++++++++++++++++++++++ airflow/www/views.py | 5 +- tests/models.py | 2 +- 4 files changed, 232 insertions(+), 199 deletions(-) create mode 100644 airflow/models/xcom.py diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 83c38adbdbb98..d059246e055ad 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -63,7 +63,7 @@ from urllib.parse import quote from sqlalchemy import ( - Boolean, Column, DateTime, Float, Index, Integer, LargeBinary, PickleType, String, + Boolean, Column, DateTime, Float, Index, Integer, PickleType, String, Text, UniqueConstraint, and_, func, or_, true as sqltrue ) from sqlalchemy.ext.declarative import declared_attr @@ -85,6 +85,7 @@ from airflow.lineage import apply_lineage, prepare_lineage from airflow.models.dagpickle import DagPickle from airflow.models.taskreschedule import TaskReschedule +from airflow.models.xcom import XCom from 
airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep @@ -96,8 +97,7 @@ from airflow.utils.db import provide_session from airflow.utils.decorators import apply_defaults from airflow.utils.email import send_email -from airflow.utils.helpers import ( - as_tuple, is_container, validate_key, pprinttable) +from airflow.utils.helpers import is_container, validate_key, pprinttable from airflow.utils.operator_resources import Resources from airflow.utils.state import State from airflow.utils.sqlalchemy import UtcDateTime, Interval @@ -4377,199 +4377,6 @@ def rotate_fernet_key(self): self._val = fernet.rotate(self._val.encode('utf-8')).decode() -class XCom(Base, LoggingMixin): - """ - Base class for XCom objects. - """ - __tablename__ = "xcom" - - id = Column(Integer, primary_key=True) - key = Column(String(512)) - value = Column(LargeBinary) - timestamp = Column( - UtcDateTime, default=timezone.utcnow, nullable=False) - execution_date = Column(UtcDateTime, nullable=False) - - # source information - task_id = Column(String(ID_LEN), nullable=False) - dag_id = Column(String(ID_LEN), nullable=False) - - __table_args__ = ( - Index('idx_xcom_dag_task_date', dag_id, task_id, execution_date, unique=False), - ) - - """ - TODO: "pickling" has been deprecated and JSON is preferred. - "pickling" will be removed in Airflow 2.0. - """ - @reconstructor - def init_on_load(self): - enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling') - if enable_pickling: - self.value = pickle.loads(self.value) - else: - try: - self.value = json.loads(self.value.decode('UTF-8')) - except (UnicodeEncodeError, ValueError): - # For backward-compatibility. - # Preventing errors in webserver - # due to XComs mixed with pickled and unpickled. 
- self.value = pickle.loads(self.value) - - def __repr__(self): - return ''.format( - key=self.key, - task_id=self.task_id, - execution_date=self.execution_date) - - @classmethod - @provide_session - def set( - cls, - key, - value, - execution_date, - task_id, - dag_id, - session=None): - """ - Store an XCom value. - TODO: "pickling" has been deprecated and JSON is preferred. - "pickling" will be removed in Airflow 2.0. - - :return: None - """ - session.expunge_all() - - enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling') - if enable_pickling: - value = pickle.dumps(value) - else: - try: - value = json.dumps(value).encode('UTF-8') - except ValueError: - log = LoggingMixin().log - log.error("Could not serialize the XCOM value into JSON. " - "If you are using pickles instead of JSON " - "for XCOM, then you need to enable pickle " - "support for XCOM in your airflow config.") - raise - - # remove any duplicate XComs - session.query(cls).filter( - cls.key == key, - cls.execution_date == execution_date, - cls.task_id == task_id, - cls.dag_id == dag_id).delete() - - session.commit() - - # insert new XCom - session.add(XCom( - key=key, - value=value, - execution_date=execution_date, - task_id=task_id, - dag_id=dag_id)) - - session.commit() - - @classmethod - @provide_session - def get_one(cls, - execution_date, - key=None, - task_id=None, - dag_id=None, - include_prior_dates=False, - session=None): - """ - Retrieve an XCom value, optionally meeting certain criteria. - TODO: "pickling" has been deprecated and JSON is preferred. - "pickling" will be removed in Airflow 2.0. 
- - :return: XCom value - """ - filters = [] - if key: - filters.append(cls.key == key) - if task_id: - filters.append(cls.task_id == task_id) - if dag_id: - filters.append(cls.dag_id == dag_id) - if include_prior_dates: - filters.append(cls.execution_date <= execution_date) - else: - filters.append(cls.execution_date == execution_date) - - query = ( - session.query(cls.value).filter(and_(*filters)) - .order_by(cls.execution_date.desc(), cls.timestamp.desc())) - - result = query.first() - if result: - enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling') - if enable_pickling: - return pickle.loads(result.value) - else: - try: - return json.loads(result.value.decode('UTF-8')) - except ValueError: - log = LoggingMixin().log - log.error("Could not deserialize the XCOM value from JSON. " - "If you are using pickles instead of JSON " - "for XCOM, then you need to enable pickle " - "support for XCOM in your airflow config.") - raise - - @classmethod - @provide_session - def get_many(cls, - execution_date, - key=None, - task_ids=None, - dag_ids=None, - include_prior_dates=False, - limit=100, - session=None): - """ - Retrieve an XCom value, optionally meeting certain criteria - TODO: "pickling" has been deprecated and JSON is preferred. - "pickling" will be removed in Airflow 2.0. 
- """ - filters = [] - if key: - filters.append(cls.key == key) - if task_ids: - filters.append(cls.task_id.in_(as_tuple(task_ids))) - if dag_ids: - filters.append(cls.dag_id.in_(as_tuple(dag_ids))) - if include_prior_dates: - filters.append(cls.execution_date <= execution_date) - else: - filters.append(cls.execution_date == execution_date) - - query = ( - session.query(cls).filter(and_(*filters)) - .order_by(cls.execution_date.desc(), cls.timestamp.desc()) - .limit(limit)) - results = query.all() - return results - - @classmethod - @provide_session - def delete(cls, xcoms, session=None): - if isinstance(xcoms, XCom): - xcoms = [xcoms] - for xcom in xcoms: - if not isinstance(xcom, XCom): - raise TypeError( - 'Expected XCom; received {}'.format(xcom.__class__.__name__) - ) - session.delete(xcom) - session.commit() - - class DagRun(Base, LoggingMixin): """ DagRun describes an instance of a Dag. It can be created diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py new file mode 100644 index 0000000000000..f097b49a8c1ba --- /dev/null +++ b/airflow/models/xcom.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import json +import pickle + +from sqlalchemy import Column, Integer, String, Index, LargeBinary, and_ +from sqlalchemy.orm import reconstructor + +from airflow import configuration +from airflow.models.base import Base, ID_LEN +from airflow.utils import timezone +from airflow.utils.db import provide_session +from airflow.utils.helpers import as_tuple +from airflow.utils.log.logging_mixin import LoggingMixin +from airflow.utils.sqlalchemy import UtcDateTime + + +class XCom(Base, LoggingMixin): + """ + Base class for XCom objects. + """ + __tablename__ = "xcom" + + id = Column(Integer, primary_key=True) + key = Column(String(512)) + value = Column(LargeBinary) + timestamp = Column( + UtcDateTime, default=timezone.utcnow, nullable=False) + execution_date = Column(UtcDateTime, nullable=False) + + # source information + task_id = Column(String(ID_LEN), nullable=False) + dag_id = Column(String(ID_LEN), nullable=False) + + __table_args__ = ( + Index('idx_xcom_dag_task_date', dag_id, task_id, execution_date, unique=False), + ) + + """ + TODO: "pickling" has been deprecated and JSON is preferred. + "pickling" will be removed in Airflow 2.0. + """ + @reconstructor + def init_on_load(self): + enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling') + if enable_pickling: + self.value = pickle.loads(self.value) + else: + try: + self.value = json.loads(self.value.decode('UTF-8')) + except (UnicodeEncodeError, ValueError): + # For backward-compatibility. + # Preventing errors in webserver + # due to XComs mixed with pickled and unpickled. + self.value = pickle.loads(self.value) + + def __repr__(self): + return ''.format( + key=self.key, + task_id=self.task_id, + execution_date=self.execution_date) + + @classmethod + @provide_session + def set( + cls, + key, + value, + execution_date, + task_id, + dag_id, + session=None): + """ + Store an XCom value. + TODO: "pickling" has been deprecated and JSON is preferred. + "pickling" will be removed in Airflow 2.0. 
+ + :return: None + """ + session.expunge_all() + + enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling') + if enable_pickling: + value = pickle.dumps(value) + else: + try: + value = json.dumps(value).encode('UTF-8') + except ValueError: + log = LoggingMixin().log + log.error("Could not serialize the XCOM value into JSON. " + "If you are using pickles instead of JSON " + "for XCOM, then you need to enable pickle " + "support for XCOM in your airflow config.") + raise + + # remove any duplicate XComs + session.query(cls).filter( + cls.key == key, + cls.execution_date == execution_date, + cls.task_id == task_id, + cls.dag_id == dag_id).delete() + + session.commit() + + # insert new XCom + session.add(XCom( + key=key, + value=value, + execution_date=execution_date, + task_id=task_id, + dag_id=dag_id)) + + session.commit() + + @classmethod + @provide_session + def get_one(cls, + execution_date, + key=None, + task_id=None, + dag_id=None, + include_prior_dates=False, + session=None): + """ + Retrieve an XCom value, optionally meeting certain criteria. + TODO: "pickling" has been deprecated and JSON is preferred. + "pickling" will be removed in Airflow 2.0. 
+ + :return: XCom value + """ + filters = [] + if key: + filters.append(cls.key == key) + if task_id: + filters.append(cls.task_id == task_id) + if dag_id: + filters.append(cls.dag_id == dag_id) + if include_prior_dates: + filters.append(cls.execution_date <= execution_date) + else: + filters.append(cls.execution_date == execution_date) + + query = ( + session.query(cls.value).filter(and_(*filters)) + .order_by(cls.execution_date.desc(), cls.timestamp.desc())) + + result = query.first() + if result: + enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling') + if enable_pickling: + return pickle.loads(result.value) + else: + try: + return json.loads(result.value.decode('UTF-8')) + except ValueError: + log = LoggingMixin().log + log.error("Could not deserialize the XCOM value from JSON. " + "If you are using pickles instead of JSON " + "for XCOM, then you need to enable pickle " + "support for XCOM in your airflow config.") + raise + + @classmethod + @provide_session + def get_many(cls, + execution_date, + key=None, + task_ids=None, + dag_ids=None, + include_prior_dates=False, + limit=100, + session=None): + """ + Retrieve an XCom value, optionally meeting certain criteria + TODO: "pickling" has been deprecated and JSON is preferred. + "pickling" will be removed in Airflow 2.0. 
+ """ + filters = [] + if key: + filters.append(cls.key == key) + if task_ids: + filters.append(cls.task_id.in_(as_tuple(task_ids))) + if dag_ids: + filters.append(cls.dag_id.in_(as_tuple(dag_ids))) + if include_prior_dates: + filters.append(cls.execution_date <= execution_date) + else: + filters.append(cls.execution_date == execution_date) + + query = ( + session.query(cls).filter(and_(*filters)) + .order_by(cls.execution_date.desc(), cls.timestamp.desc()) + .limit(limit)) + results = query.all() + return results + + @classmethod + @provide_session + def delete(cls, xcoms, session=None): + if isinstance(xcoms, XCom): + xcoms = [xcoms] + for xcom in xcoms: + if not isinstance(xcom, XCom): + raise TypeError( + 'Expected XCom; received {}'.format(xcom.__class__.__name__) + ) + session.delete(xcom) + session.commit() diff --git a/airflow/www/views.py b/airflow/www/views.py index 000c6f479208c..9cd6437839834 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -54,9 +54,10 @@ from airflow import settings from airflow.api.common.experimental.mark_tasks import (set_dag_run_state_to_success, set_dag_run_state_to_failed) -from airflow.models import XCom, DagRun, errors +from airflow.models import DagRun, errors from airflow.models.connection import Connection from airflow.models.slamiss import SlaMiss +from airflow.models.xcom import XCom from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, SCHEDULER_DEPS from airflow.utils import timezone from airflow.utils.dates import infer_time_unit, scale_time_units @@ -1908,7 +1909,7 @@ class SlaMissModelView(AirflowModelView): class XComModelView(AirflowModelView): route_base = '/xcom' - datamodel = AirflowModelView.CustomSQLAInterface(models.XCom) + datamodel = AirflowModelView.CustomSQLAInterface(XCom) base_permissions = ['can_add', 'can_list', 'can_edit', 'can_delete'] diff --git a/tests/models.py b/tests/models.py index 8cd43b450e778..fb75e2419c503 100644 --- a/tests/models.py +++ b/tests/models.py @@ 
-50,11 +50,11 @@ from airflow.models import KubeResourceVersion, KubeWorkerIdentifier from airflow.models import SkipMixin from airflow.models import State as ST -from airflow.models import XCom from airflow.models import Variable from airflow.models import clear_task_instances from airflow.models.connection import Connection from airflow.models.taskreschedule import TaskReschedule +from airflow.models.xcom import XCom from airflow.operators.bash_operator import BashOperator from airflow.operators.dummy_operator import DummyOperator from airflow.operators.python_operator import PythonOperator From d126d9ef215222f618519eda79900399cef63e9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Thu, 31 Jan 2019 12:32:23 +0100 Subject: [PATCH 0040/1104] [AIRFLOW-3730] Standarization use of logs mechanisms (#4556) --- airflow/contrib/hooks/bigquery_hook.py | 2 +- airflow/contrib/hooks/databricks_hook.py | 4 +-- airflow/contrib/hooks/ftp_hook.py | 4 +-- airflow/contrib/hooks/gcp_api_base_hook.py | 3 +- airflow/contrib/hooks/jira_hook.py | 3 +- airflow/contrib/hooks/redis_hook.py | 3 +- airflow/contrib/hooks/salesforce_hook.py | 2 +- airflow/contrib/hooks/segment_hook.py | 4 +-- airflow/contrib/hooks/sftp_hook.py | 6 ++-- airflow/contrib/hooks/sqoop_hook.py | 3 +- airflow/contrib/hooks/ssh_hook.py | 3 +- airflow/contrib/hooks/winrm_hook.py | 3 +- airflow/contrib/operators/vertica_to_mysql.py | 29 +++++++++---------- airflow/contrib/sensors/sftp_sensor.py | 3 +- 14 files changed, 28 insertions(+), 44 deletions(-) diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py index e3075ba9b8573..9ec7e876afae7 100644 --- a/airflow/contrib/hooks/bigquery_hook.py +++ b/airflow/contrib/hooks/bigquery_hook.py @@ -44,7 +44,7 @@ from pandas_gbq.gbq import GbqConnector -class BigQueryHook(GoogleCloudBaseHook, DbApiHook, LoggingMixin): +class BigQueryHook(GoogleCloudBaseHook, DbApiHook): """ Interact with BigQuery. 
This hook uses the Google Cloud Platform connection. diff --git a/airflow/contrib/hooks/databricks_hook.py b/airflow/contrib/hooks/databricks_hook.py index f4a890ac7ba4a..a83e577e6aa3a 100644 --- a/airflow/contrib/hooks/databricks_hook.py +++ b/airflow/contrib/hooks/databricks_hook.py @@ -26,8 +26,6 @@ from requests.auth import AuthBase from time import sleep -from airflow.utils.log.logging_mixin import LoggingMixin - try: from urllib import parse as urlparse except ImportError: @@ -44,7 +42,7 @@ USER_AGENT_HEADER = {'user-agent': 'airflow-{v}'.format(v=__version__)} -class DatabricksHook(BaseHook, LoggingMixin): +class DatabricksHook(BaseHook): """ Interact with Databricks. """ diff --git a/airflow/contrib/hooks/ftp_hook.py b/airflow/contrib/hooks/ftp_hook.py index 7d3cbd12320b7..494d3d1d4c628 100644 --- a/airflow/contrib/hooks/ftp_hook.py +++ b/airflow/contrib/hooks/ftp_hook.py @@ -24,8 +24,6 @@ from airflow.hooks.base_hook import BaseHook from past.builtins import basestring -from airflow.utils.log.logging_mixin import LoggingMixin - def mlsd(conn, path="", facts=None): """ @@ -60,7 +58,7 @@ def mlsd(conn, path="", facts=None): yield (name, entry) -class FTPHook(BaseHook, LoggingMixin): +class FTPHook(BaseHook): """ Interact with FTP. diff --git a/airflow/contrib/hooks/gcp_api_base_hook.py b/airflow/contrib/hooks/gcp_api_base_hook.py index 596209fb542ef..f24ad48c28415 100644 --- a/airflow/contrib/hooks/gcp_api_base_hook.py +++ b/airflow/contrib/hooks/gcp_api_base_hook.py @@ -29,7 +29,6 @@ from airflow.exceptions import AirflowException from airflow.hooks.base_hook import BaseHook -from airflow.utils.log.logging_mixin import LoggingMixin _DEFAULT_SCOPES = ('https://www.googleapis.com/auth/cloud-platform',) @@ -39,7 +38,7 @@ _G_APP_CRED_ENV_VAR = "GOOGLE_APPLICATION_CREDENTIALS" -class GoogleCloudBaseHook(BaseHook, LoggingMixin): +class GoogleCloudBaseHook(BaseHook): """ A base hook for Google cloud-related hooks. 
Google cloud has a shared REST API client that is built in the same way no matter which service you use. diff --git a/airflow/contrib/hooks/jira_hook.py b/airflow/contrib/hooks/jira_hook.py index a56c414dceb54..e3c4a12ffe69b 100644 --- a/airflow/contrib/hooks/jira_hook.py +++ b/airflow/contrib/hooks/jira_hook.py @@ -21,10 +21,9 @@ from airflow.exceptions import AirflowException from airflow.hooks.base_hook import BaseHook -from airflow.utils.log.logging_mixin import LoggingMixin -class JiraHook(BaseHook, LoggingMixin): +class JiraHook(BaseHook): """ Jira interaction hook, a Wrapper around JIRA Python SDK. diff --git a/airflow/contrib/hooks/redis_hook.py b/airflow/contrib/hooks/redis_hook.py index a34e8807967ad..6d864ed420fb1 100644 --- a/airflow/contrib/hooks/redis_hook.py +++ b/airflow/contrib/hooks/redis_hook.py @@ -22,10 +22,9 @@ """ from redis import StrictRedis from airflow.hooks.base_hook import BaseHook -from airflow.utils.log.logging_mixin import LoggingMixin -class RedisHook(BaseHook, LoggingMixin): +class RedisHook(BaseHook): """ Wrapper for connection to interact with Redis in-memory data structure store """ diff --git a/airflow/contrib/hooks/salesforce_hook.py b/airflow/contrib/hooks/salesforce_hook.py index 285b3cc3f89c2..ba5c7e8d9a4d8 100644 --- a/airflow/contrib/hooks/salesforce_hook.py +++ b/airflow/contrib/hooks/salesforce_hook.py @@ -37,7 +37,7 @@ from airflow.utils.log.logging_mixin import LoggingMixin -class SalesforceHook(BaseHook, LoggingMixin): +class SalesforceHook(BaseHook): def __init__( self, conn_id, diff --git a/airflow/contrib/hooks/segment_hook.py b/airflow/contrib/hooks/segment_hook.py index 01613f2adee8e..a072a9f1eaf99 100644 --- a/airflow/contrib/hooks/segment_hook.py +++ b/airflow/contrib/hooks/segment_hook.py @@ -29,10 +29,8 @@ from airflow.hooks.base_hook import BaseHook from airflow.exceptions import AirflowException -from airflow.utils.log.logging_mixin import LoggingMixin - -class SegmentHook(BaseHook, LoggingMixin): +class 
SegmentHook(BaseHook): def __init__( self, segment_conn_id='segment_default', diff --git a/airflow/contrib/hooks/sftp_hook.py b/airflow/contrib/hooks/sftp_hook.py index a6a59920f2b0b..6fc01babe5ab2 100644 --- a/airflow/contrib/hooks/sftp_hook.py +++ b/airflow/contrib/hooks/sftp_hook.py @@ -19,7 +19,6 @@ import stat import pysftp -import logging import datetime from airflow.contrib.hooks.ssh_hook import SSHHook @@ -183,10 +182,9 @@ def retrieve_file(self, remote_full_path, local_full_path): :type local_full_path: str """ conn = self.get_conn() - logging.info('Retrieving file from FTP: {}'.format(remote_full_path)) + self.log.info('Retrieving file from FTP: %s', remote_full_path) conn.get(remote_full_path, local_full_path) - logging.info('Finished retrieving file from FTP: {}'.format( - remote_full_path)) + self.log.info('Finished retrieving file from FTP: %s', remote_full_path) def store_file(self, remote_full_path, local_full_path): """ diff --git a/airflow/contrib/hooks/sqoop_hook.py b/airflow/contrib/hooks/sqoop_hook.py index f4bad83144a4c..def48824043e5 100644 --- a/airflow/contrib/hooks/sqoop_hook.py +++ b/airflow/contrib/hooks/sqoop_hook.py @@ -25,11 +25,10 @@ from airflow.exceptions import AirflowException from airflow.hooks.base_hook import BaseHook -from airflow.utils.log.logging_mixin import LoggingMixin from copy import deepcopy -class SqoopHook(BaseHook, LoggingMixin): +class SqoopHook(BaseHook): """ This hook is a wrapper around the sqoop 1 binary. To be able to use the hook it is required that "sqoop" is in the PATH. 
diff --git a/airflow/contrib/hooks/ssh_hook.py b/airflow/contrib/hooks/ssh_hook.py index 97e93bbc603f9..a098234d90362 100755 --- a/airflow/contrib/hooks/ssh_hook.py +++ b/airflow/contrib/hooks/ssh_hook.py @@ -27,10 +27,9 @@ from airflow.exceptions import AirflowException from airflow.hooks.base_hook import BaseHook -from airflow.utils.log.logging_mixin import LoggingMixin -class SSHHook(BaseHook, LoggingMixin): +class SSHHook(BaseHook): """ Hook for ssh remote execution using Paramiko. ref: https://github.com/paramiko/paramiko diff --git a/airflow/contrib/hooks/winrm_hook.py b/airflow/contrib/hooks/winrm_hook.py index 5cc24cb7ad75f..336dc2d818c62 100644 --- a/airflow/contrib/hooks/winrm_hook.py +++ b/airflow/contrib/hooks/winrm_hook.py @@ -24,10 +24,9 @@ from airflow.exceptions import AirflowException from airflow.hooks.base_hook import BaseHook -from airflow.utils.log.logging_mixin import LoggingMixin -class WinRMHook(BaseHook, LoggingMixin): +class WinRMHook(BaseHook): """ Hook for winrm remote execution using pywinrm. diff --git a/airflow/contrib/operators/vertica_to_mysql.py b/airflow/contrib/operators/vertica_to_mysql.py index 9d3fe721e4cbb..99b55e61bc6e0 100644 --- a/airflow/contrib/operators/vertica_to_mysql.py +++ b/airflow/contrib/operators/vertica_to_mysql.py @@ -17,7 +17,6 @@ # specific language governing permissions and limitations # under the License. 
-import logging import MySQLdb from airflow.contrib.hooks.vertica_hook import VerticaHook @@ -103,10 +102,10 @@ def execute(self, context): if self.bulk_load: tmpfile = NamedTemporaryFile("w") - logging.info( - "Selecting rows from Vertica to local file " + str( - tmpfile.name) + "...") - logging.info(self.sql) + self.log.info( + "Selecting rows from Vertica to local file %s...", + tmpfile.name) + self.log.info(self.sql) csv_writer = csv.writer(tmpfile, delimiter='\t', encoding='utf-8') for row in cursor.iterate(): @@ -115,21 +114,21 @@ def execute(self, context): tmpfile.flush() else: - logging.info("Selecting rows from Vertica...") - logging.info(self.sql) + self.log.info("Selecting rows from Vertica...") + self.log.info(self.sql) result = cursor.fetchall() count = len(result) - logging.info("Selected rows from Vertica " + str(count)) + self.log.info("Selected rows from Vertica %s", count) if self.mysql_preoperator: - logging.info("Running MySQL preoperator...") + self.log.info("Running MySQL preoperator...") mysql.run(self.mysql_preoperator) try: if self.bulk_load: - logging.info("Bulk inserting rows into MySQL...") + self.log.info("Bulk inserting rows into MySQL...") with closing(mysql.get_conn()) as conn: with closing(conn.cursor()) as cursor: cursor.execute("LOAD DATA LOCAL INFILE '%s' INTO " @@ -140,17 +139,17 @@ def execute(self, context): conn.commit() tmpfile.close() else: - logging.info("Inserting rows into MySQL...") + self.log.info("Inserting rows into MySQL...") mysql.insert_rows(table=self.mysql_table, rows=result, target_fields=selected_columns) - logging.info("Inserted rows into MySQL " + str(count)) + self.log.info("Inserted rows into MySQL %s", count) except (MySQLdb.Error, MySQLdb.Warning): - logging.error("Inserted rows into MySQL 0") + self.log.info("Inserted rows into MySQL 0") raise if self.mysql_postoperator: - logging.info("Running MySQL postoperator...") + self.log.info("Running MySQL postoperator...") mysql.run(self.mysql_postoperator) - 
logging.info("Done") + self.log.info("Done") diff --git a/airflow/contrib/sensors/sftp_sensor.py b/airflow/contrib/sensors/sftp_sensor.py index 40ff6c9cf2868..f992f7d9092f2 100644 --- a/airflow/contrib/sensors/sftp_sensor.py +++ b/airflow/contrib/sensors/sftp_sensor.py @@ -17,7 +17,6 @@ # specific language governing permissions and limitations # under the License. -import logging from paramiko import SFTP_NO_SUCH_FILE from airflow.contrib.hooks.sftp_hook import SFTPHook from airflow.sensors.base_sensor_operator import BaseSensorOperator @@ -42,7 +41,7 @@ def __init__(self, path, sftp_conn_id='sftp_default', *args, **kwargs): self.hook = SFTPHook(sftp_conn_id) def poke(self, context): - logging.info('Poking for %s', self.path) + self.log.info('Poking for %s', self.path) try: self.hook.get_mod_time(self.path) except IOError as e: From 11f5032527dd5dc854b32cb5fef4651ea5522d43 Mon Sep 17 00:00:00 2001 From: Xiaodong Date: Thu, 31 Jan 2019 22:12:57 +0800 Subject: [PATCH 0041/1104] [AIRFLOW-3782] Clarify docs around celery worker_autoscale in default_airflow.cfg (#4609) Celery supports `autoscale` by accepting values in format "max_concurrency,min_concurrency". But the default value in default_airflow.cfg is wrong, and the comment can be clearer. --- airflow/config_templates/default_airflow.cfg | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg index 2069f933b3e71..c694df2512cfa 100644 --- a/airflow/config_templates/default_airflow.cfg +++ b/airflow/config_templates/default_airflow.cfg @@ -351,12 +351,13 @@ celery_app_name = airflow.executors.celery_executor # your worker box and the nature of your tasks worker_concurrency = 16 -# The minimum and maximum concurrency that will be used when starting workers with the -# "airflow worker" command. Pick these numbers based on resources on -# worker box and the nature of the task. 
If autoscale option is available worker_concurrency -# will be ignored. +# The maximum and minimum concurrency that will be used when starting workers with the +# "airflow worker" command (always keep minimum processes, but grow to maximum if necessary). +# Note the value should be "max_concurrency,min_concurrency" +# Pick these numbers based on resources on worker box and the nature of the task. +# If autoscale option is available, worker_concurrency will be ignored. # http://docs.celeryproject.org/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale -# worker_autoscale = 12,16 +# worker_autoscale = 16,12 # When you start an airflow worker, airflow starts a tiny web server # subprocess to serve the workers local log files to the airflow main From ee5b8c268339860617de49ec998b16c9e9599fee Mon Sep 17 00:00:00 2001 From: Stefan Seelmann Date: Thu, 31 Jan 2019 21:01:52 +0100 Subject: [PATCH 0042/1104] [AIRFLOW-3461] Move TaskFail out of models.py (#4630) --- airflow/api/common/experimental/delete_dag.py | 3 +- airflow/models/__init__.py | 33 +---------- airflow/models/taskfail.py | 55 +++++++++++++++++++ airflow/www/views.py | 5 +- .../operators/test_bigquery_operator.py | 3 +- tests/contrib/sensors/test_weekday_sensor.py | 3 +- tests/core.py | 7 ++- tests/models.py | 3 +- 8 files changed, 71 insertions(+), 41 deletions(-) create mode 100755 airflow/models/taskfail.py diff --git a/airflow/api/common/experimental/delete_dag.py b/airflow/api/common/experimental/delete_dag.py index 6df6e0fe091ba..28d3ada2a5008 100644 --- a/airflow/api/common/experimental/delete_dag.py +++ b/airflow/api/common/experimental/delete_dag.py @@ -22,6 +22,7 @@ from sqlalchemy import or_ from airflow import models +from airflow.models.taskfail import TaskFail from airflow.utils.db import provide_session from airflow.exceptions import DagNotFound, DagFileExists @@ -57,7 +58,7 @@ def delete_dag(dag_id, keep_records_in_log=True, session=None): if dag.is_subdag: p, c = 
dag_id.rsplit(".", 1) - for m in models.DagRun, models.TaskFail, models.TaskInstance: + for m in models.DagRun, TaskFail, models.TaskInstance: count += session.query(m).filter(m.dag_id == p, m.task_id == c).delete() return count diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index d059246e055ad..c44859fd00b38 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -84,6 +84,7 @@ from airflow.dag.base_dag import BaseDag, BaseDagBag from airflow.lineage import apply_lineage, prepare_lineage from airflow.models.dagpickle import DagPickle +from airflow.models.taskfail import TaskFail from airflow.models.taskreschedule import TaskReschedule from airflow.models.xcom import XCom from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep @@ -1855,38 +1856,6 @@ def init_run_context(self, raw=False): self._set_context(self) -class TaskFail(Base): - """ - TaskFail tracks the failed run durations of each task instance. - """ - - __tablename__ = "task_fail" - - id = Column(Integer, primary_key=True) - task_id = Column(String(ID_LEN), nullable=False) - dag_id = Column(String(ID_LEN), nullable=False) - execution_date = Column(UtcDateTime, nullable=False) - start_date = Column(UtcDateTime) - end_date = Column(UtcDateTime) - duration = Column(Integer) - - __table_args__ = ( - Index('idx_task_fail_dag_task_date', dag_id, task_id, execution_date, - unique=False), - ) - - def __init__(self, task, execution_date, start_date, end_date): - self.dag_id = task.dag_id - self.task_id = task.task_id - self.execution_date = execution_date - self.start_date = start_date - self.end_date = end_date - if self.end_date and self.start_date: - self.duration = (self.end_date - self.start_date).total_seconds() - else: - self.duration = None - - class Log(Base): """ Used to actively log events to the database diff --git a/airflow/models/taskfail.py b/airflow/models/taskfail.py new file mode 100755 index 0000000000000..d9cc7af922730 --- 
/dev/null +++ b/airflow/models/taskfail.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from sqlalchemy import Column, Index, Integer, String + +from airflow.models.base import Base, ID_LEN +from airflow.utils.sqlalchemy import UtcDateTime + + +class TaskFail(Base): + """ + TaskFail tracks the failed run durations of each task instance. 
+ """ + + __tablename__ = "task_fail" + + id = Column(Integer, primary_key=True) + task_id = Column(String(ID_LEN), nullable=False) + dag_id = Column(String(ID_LEN), nullable=False) + execution_date = Column(UtcDateTime, nullable=False) + start_date = Column(UtcDateTime) + end_date = Column(UtcDateTime) + duration = Column(Integer) + + __table_args__ = ( + Index('idx_task_fail_dag_task_date', dag_id, task_id, execution_date, + unique=False), + ) + + def __init__(self, task, execution_date, start_date, end_date): + self.dag_id = task.dag_id + self.task_id = task.task_id + self.execution_date = execution_date + self.start_date = start_date + self.end_date = end_date + if self.end_date and self.start_date: + self.duration = (self.end_date - self.start_date).total_seconds() + else: + self.duration = None diff --git a/airflow/www/views.py b/airflow/www/views.py index 9cd6437839834..819a5f68c991b 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -57,6 +57,7 @@ from airflow.models import DagRun, errors from airflow.models.connection import Connection from airflow.models.slamiss import SlaMiss +from airflow.models.taskfail import TaskFail from airflow.models.xcom import XCom from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, SCHEDULER_DEPS from airflow.utils import timezone @@ -1427,7 +1428,7 @@ def duration(self, session=None): tis = dag.get_task_instances( session, start_date=min_date, end_date=base_date) - TF = models.TaskFail + TF = TaskFail ti_fails = ( session.query(TF) .filter(TF.dag_id == dag.dag_id, @@ -1722,7 +1723,7 @@ def gantt(self, session=None): ti for ti in dag.get_task_instances(session, dttm, dttm) if ti.start_date] tis = sorted(tis, key=lambda ti: ti.start_date) - TF = models.TaskFail + TF = TaskFail ti_fails = list(itertools.chain(*[( session .query(TF) diff --git a/tests/contrib/operators/test_bigquery_operator.py b/tests/contrib/operators/test_bigquery_operator.py index 304699410992a..de9a241f3b136 100644 --- 
a/tests/contrib/operators/test_bigquery_operator.py +++ b/tests/contrib/operators/test_bigquery_operator.py @@ -24,6 +24,7 @@ from airflow import configuration, models from airflow.models import TaskInstance, DAG +from airflow.models.taskfail import TaskFail from airflow.contrib.operators.bigquery_operator import \ BigQueryCreateExternalTableOperator, BigQueryCreateEmptyTableOperator, \ @@ -167,7 +168,7 @@ def tearDown(self): session = Session() session.query(models.TaskInstance).filter_by( dag_id=TEST_DAG_ID).delete() - session.query(models.TaskFail).filter_by( + session.query(TaskFail).filter_by( dag_id=TEST_DAG_ID).delete() session.commit() session.close() diff --git a/tests/contrib/sensors/test_weekday_sensor.py b/tests/contrib/sensors/test_weekday_sensor.py index 55a4c4da4bf7d..0255aa3a162fe 100644 --- a/tests/contrib/sensors/test_weekday_sensor.py +++ b/tests/contrib/sensors/test_weekday_sensor.py @@ -24,6 +24,7 @@ from airflow.contrib.utils.weekday import WeekDay from airflow.exceptions import AirflowSensorTimeout from airflow.models import DagBag +from airflow.models.taskfail import TaskFail from airflow.settings import Session from airflow.utils.timezone import datetime @@ -53,7 +54,7 @@ def tearDown(self): session = Session() session.query(models.TaskInstance).filter_by( dag_id=TEST_DAG_ID).delete() - session.query(models.TaskFail).filter_by( + session.query(TaskFail).filter_by( dag_id=TEST_DAG_ID).delete() session.commit() session.close() diff --git a/tests/core.py b/tests/core.py index 0db725d7770a6..ad68f7d88a615 100644 --- a/tests/core.py +++ b/tests/core.py @@ -50,6 +50,7 @@ from airflow import jobs, models, DAG, utils, macros, settings, exceptions from airflow.models import BaseOperator from airflow.models.connection import Connection +from airflow.models.taskfail import TaskFail from airflow.operators.bash_operator import BashOperator from airflow.operators.check_operator import CheckOperator, ValueCheckOperator from 
airflow.operators.dagrun_operator import TriggerDagRunOperator @@ -124,7 +125,7 @@ def tearDown(self): session = Session() session.query(models.TaskInstance).filter_by( dag_id=TEST_DAG_ID).delete() - session.query(models.TaskFail).filter_by( + session.query(TaskFail).filter_by( dag_id=TEST_DAG_ID).delete() session.commit() session.close() @@ -944,11 +945,11 @@ def test_task_fail_duration(self): f.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) except Exception: pass - p_fails = session.query(models.TaskFail).filter_by( + p_fails = session.query(TaskFail).filter_by( task_id='pass_sleepy', dag_id=self.dag.dag_id, execution_date=DEFAULT_DATE).all() - f_fails = session.query(models.TaskFail).filter_by( + f_fails = session.query(TaskFail).filter_by( task_id='fail_sleepy', dag_id=self.dag.dag_id, execution_date=DEFAULT_DATE).all() diff --git a/tests/models.py b/tests/models.py index fb75e2419c503..7b9a5d91d4068 100644 --- a/tests/models.py +++ b/tests/models.py @@ -53,6 +53,7 @@ from airflow.models import Variable from airflow.models import clear_task_instances from airflow.models.connection import Connection +from airflow.models.taskfail import TaskFail from airflow.models.taskreschedule import TaskReschedule from airflow.models.xcom import XCom from airflow.operators.bash_operator import BashOperator @@ -1945,7 +1946,7 @@ class TaskInstanceTest(unittest.TestCase): def tearDown(self): with create_session() as session: - session.query(models.TaskFail).delete() + session.query(TaskFail).delete() session.query(TaskReschedule).delete() session.query(models.TaskInstance).delete() From f07f3a8831ba18765816447c159888ad40b22cd5 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 1 Feb 2019 12:44:02 +0100 Subject: [PATCH 0043/1104] [AIRFLOW-XXX] The execution_date is Pendulum --- docs/code.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/code.rst b/docs/code.rst index 9fd0c3fb34d04..504b2970ef688 100644 --- 
a/docs/code.rst +++ b/docs/code.rst @@ -335,9 +335,9 @@ Variable Description ``{{ ts }}`` same as ``execution_date.isoformat()``. Example: ``2018-01-01T00:00:00+00:00`` ``{{ ts_nodash }}`` same as ``ts`` without ``-``, ``:`` and TimeZone info. Example: ``20180101T000000`` ``{{ ts_nodash_with_tz }}`` same as ``ts`` without ``-`` and ``:``. Example: ``20180101T000000+0000`` -``{{ execution_date }}`` the execution_date, (datetime.datetime) -``{{ prev_execution_date }}`` the previous execution date (if available) (datetime.datetime) -``{{ next_execution_date }}`` the next execution date (datetime.datetime) +``{{ execution_date }}`` the execution_date (pendulum.Pendulum) +``{{ prev_execution_date }}`` the previous execution date (if available) (pendulum.Pendulum) +``{{ next_execution_date }}`` the next execution date (pendulum.Pendulum) ``{{ dag }}`` the DAG object ``{{ task }}`` the Task object ``{{ macros }}`` a reference to the macros package, described below From 0f02e45b7e127da3539752607ab02347d9fcd733 Mon Sep 17 00:00:00 2001 From: andyh1203 Date: Fri, 1 Feb 2019 22:09:41 -0800 Subject: [PATCH 0044/1104] [AIRFLOW-3463] Move Log out of models.py (#4639) --- airflow/models/__init__.py | 45 +----------- airflow/models/log.py | 68 +++++++++++++++++++ airflow/utils/cli.py | 6 +- airflow/www/decorators.py | 4 +- airflow/www/views.py | 3 +- .../common/experimental/test_delete_dag.py | 2 +- 6 files changed, 77 insertions(+), 51 deletions(-) create mode 100644 airflow/models/log.py diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index c44859fd00b38..7ce38eb1d2094 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -84,6 +84,7 @@ from airflow.dag.base_dag import BaseDag, BaseDagBag from airflow.lineage import apply_lineage, prepare_lineage from airflow.models.dagpickle import DagPickle +from airflow.models.log import Log from airflow.models.taskfail import TaskFail from airflow.models.taskreschedule import TaskReschedule from 
airflow.models.xcom import XCom @@ -1856,50 +1857,6 @@ def init_run_context(self, raw=False): self._set_context(self) -class Log(Base): - """ - Used to actively log events to the database - """ - - __tablename__ = "log" - - id = Column(Integer, primary_key=True) - dttm = Column(UtcDateTime) - dag_id = Column(String(ID_LEN)) - task_id = Column(String(ID_LEN)) - event = Column(String(30)) - execution_date = Column(UtcDateTime) - owner = Column(String(500)) - extra = Column(Text) - - __table_args__ = ( - Index('idx_log_dag', dag_id), - ) - - def __init__(self, event, task_instance, owner=None, extra=None, **kwargs): - self.dttm = timezone.utcnow() - self.event = event - self.extra = extra - - task_owner = None - - if task_instance: - self.dag_id = task_instance.dag_id - self.task_id = task_instance.task_id - self.execution_date = task_instance.execution_date - task_owner = task_instance.task.owner - - if 'task_id' in kwargs: - self.task_id = kwargs['task_id'] - if 'dag_id' in kwargs: - self.dag_id = kwargs['dag_id'] - if 'execution_date' in kwargs: - if kwargs['execution_date']: - self.execution_date = kwargs['execution_date'] - - self.owner = owner or task_owner - - class SkipMixin(LoggingMixin): @provide_session def skip(self, dag_run, execution_date, tasks, session=None): diff --git a/airflow/models/log.py b/airflow/models/log.py new file mode 100644 index 0000000000000..fa3fed60ee240 --- /dev/null +++ b/airflow/models/log.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from sqlalchemy import Column, Integer, String, Text, Index + +from airflow.models.base import Base, ID_LEN +from airflow.utils import timezone +from airflow.utils.sqlalchemy import UtcDateTime + + +class Log(Base): + """ + Used to actively log events to the database + """ + + __tablename__ = "log" + + id = Column(Integer, primary_key=True) + dttm = Column(UtcDateTime) + dag_id = Column(String(ID_LEN)) + task_id = Column(String(ID_LEN)) + event = Column(String(30)) + execution_date = Column(UtcDateTime) + owner = Column(String(500)) + extra = Column(Text) + + __table_args__ = ( + Index('idx_log_dag', dag_id), + ) + + def __init__(self, event, task_instance, owner=None, extra=None, **kwargs): + self.dttm = timezone.utcnow() + self.event = event + self.extra = extra + + task_owner = None + + if task_instance: + self.dag_id = task_instance.dag_id + self.task_id = task_instance.task_id + self.execution_date = task_instance.execution_date + task_owner = task_instance.task.owner + + if 'task_id' in kwargs: + self.task_id = kwargs['task_id'] + if 'dag_id' in kwargs: + self.dag_id = kwargs['dag_id'] + if 'execution_date' in kwargs: + if kwargs['execution_date']: + self.execution_date = kwargs['execution_date'] + + self.owner = owner or task_owner diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py index 32303cd90bd8d..6c3bf42c9458c 100644 --- a/airflow/utils/cli.py +++ b/airflow/utils/cli.py @@ -30,7 +30,7 @@ from argparse import Namespace from datetime import datetime -import airflow.models +from airflow.models.log import Log from airflow.utils import 
cli_action_loggers @@ -46,7 +46,7 @@ def action_logging(f): end_datetime : end datetime instance by utc full_command : full command line arguments user : current user - log : airflow.models.Log ORM instance + log : airflow.models.log.Log ORM instance dag_id : dag id (optional) task_id : task_id (optional) execution_date : execution date (optional) @@ -105,7 +105,7 @@ def _build_metrics(func_name, namespace): metrics['host_name'] = socket.gethostname() extra = json.dumps(dict((k, metrics[k]) for k in ('host_name', 'full_command'))) - log = airflow.models.Log( + log = Log( event='cli_{}'.format(func_name), task_instance=None, owner=metrics['user'], diff --git a/airflow/www/decorators.py b/airflow/www/decorators.py index 4ec2c7e8e9774..889a0b1b33b2f 100644 --- a/airflow/www/decorators.py +++ b/airflow/www/decorators.py @@ -22,7 +22,7 @@ import pendulum from io import BytesIO as IO from flask import after_this_request, redirect, request, url_for, g -from airflow import models +from airflow.models.log import Log from airflow.utils.db import create_session @@ -39,7 +39,7 @@ def wrapper(*args, **kwargs): else: user = g.user.username - log = models.Log( + log = Log( event=f.__name__, task_instance=None, owner=user, diff --git a/airflow/www/views.py b/airflow/www/views.py index 819a5f68c991b..0775d526e5b67 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -56,6 +56,7 @@ set_dag_run_state_to_failed) from airflow.models import DagRun, errors from airflow.models.connection import Connection +from airflow.models.log import Log from airflow.models.slamiss import SlaMiss from airflow.models.taskfail import TaskFail from airflow.models.xcom import XCom @@ -2279,7 +2280,7 @@ def action_set_success(self, drs, session=None): class LogModelView(AirflowModelView): route_base = '/log' - datamodel = AirflowModelView.CustomSQLAInterface(models.Log) + datamodel = AirflowModelView.CustomSQLAInterface(Log) base_permissions = ['can_list'] diff --git 
a/tests/api/common/experimental/test_delete_dag.py b/tests/api/common/experimental/test_delete_dag.py index 70a2692c249ba..0d64589baef79 100644 --- a/tests/api/common/experimental/test_delete_dag.py +++ b/tests/api/common/experimental/test_delete_dag.py @@ -30,7 +30,7 @@ DM = models.DagModel DR = models.DagRun TI = models.TaskInstance -LOG = models.Log +LOG = models.log.Log class TestDeleteDAGCatchError(unittest.TestCase): From 7c6ce873f21cef888c9af26e7fc65ab8647de38b Mon Sep 17 00:00:00 2001 From: gseva Date: Tue, 5 Feb 2019 15:30:04 -0300 Subject: [PATCH 0045/1104] [AIRFLOW-XXX] Fixed note in plugins.rst (#4649) Changing it to rst notation, so it stands out in read the docs. --- docs/plugins.rst | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/plugins.rst b/docs/plugins.rst index a4913dce27537..8bc8d9ab53166 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -232,11 +232,12 @@ It is possible to load plugins via `setuptools entrypoint Date: Tue, 5 Feb 2019 21:48:25 +0100 Subject: [PATCH 0046/1104] [AIRFLOW-XXX] Add missing class references to docs (#4644) --- .../contrib/operators/databricks_operator.py | 5 +- .../contrib/operators/s3_to_sftp_operator.py | 33 ++-- .../contrib/operators/sftp_to_s3_operator.py | 34 ++-- docs/code.rst | 150 ++++++++++-------- docs/integration.rst | 1 + 5 files changed, 126 insertions(+), 97 deletions(-) diff --git a/airflow/contrib/operators/databricks_operator.py b/airflow/contrib/operators/databricks_operator.py index 5fbc605abea60..a8742e401c4fe 100644 --- a/airflow/contrib/operators/databricks_operator.py +++ b/airflow/contrib/operators/databricks_operator.py @@ -37,7 +37,7 @@ def _deep_string_coerce(content, json_path='json'): function will throw if content contains non-string or non-numeric types. The reason why we have this function is because the ``self.json`` field must be a - dict with only string values. This is because ``render_template`` will fail + dict with only string values. 
This is because ``render_template`` will fail for numerical values. """ c = _deep_string_coerce @@ -302,6 +302,7 @@ class DatabricksRunNowOperator(BaseOperator): to call the ``api/2.0/jobs/run-now`` endpoint and pass it directly to our ``DatabricksRunNowOperator`` through the ``json`` parameter. For example :: + json = { "job_id": 42, "notebook_params": { @@ -349,6 +350,7 @@ class DatabricksRunNowOperator(BaseOperator): :param job_id: the job_id of the existing Databricks job. This field will be templated. + .. seealso:: https://docs.databricks.com/api/latest/jobs.html#run-now :type job_id: str @@ -396,6 +398,7 @@ class DatabricksRunNowOperator(BaseOperator): in job setting. The json representation of this field cannot exceed 10,000 bytes. This field will be templated. + .. seealso:: https://docs.databricks.com/api/latest/jobs.html#run-now :type spark_submit_params: array of strings diff --git a/airflow/contrib/operators/s3_to_sftp_operator.py b/airflow/contrib/operators/s3_to_sftp_operator.py index 422165bca7fed..43ef269032db8 100644 --- a/airflow/contrib/operators/s3_to_sftp_operator.py +++ b/airflow/contrib/operators/s3_to_sftp_operator.py @@ -27,22 +27,23 @@ class S3ToSFTPOperator(BaseOperator): """ - This operator enables the transferring of files from S3 to a SFTP server - :param sftp_conn_id: The sftp connection id. The name or - identifier for establishing a connection to the SFTP server. - :type sftp_conn_id: string - :param sftp_path: The sftp remote path. This is the specified - file path for uploading file to the SFTP server. - :type sftp_path: string - :param s3_conn_id: The s3 connection id. The name or identifier for establishing - a connection to S3 - :type s3_conn_id: string - :param s3_bucket: The targeted s3 bucket. This is the S3 bucket - from where the file is downloaded. - :type s3_bucket: string - :param s3_key: The targeted s3 key. This is the specified file path - for downloading the file from S3. 
- :type s3_key: string + This operator enables the transferring of files from S3 to a SFTP server. + + :param sftp_conn_id: The sftp connection id. The name or identifier for + establishing a connection to the SFTP server. + :type sftp_conn_id: string + :param sftp_path: The sftp remote path. This is the specified file path for + uploading file to the SFTP server. + :type sftp_path: string + :param s3_conn_id: The s3 connection id. The name or identifier for + establishing a connection to S3 + :type s3_conn_id: string + :param s3_bucket: The targeted s3 bucket. This is the S3 bucket from + where the file is downloaded. + :type s3_bucket: string + :param s3_key: The targeted s3 key. This is the specified file path for + downloading the file from S3. + :type s3_key: string """ template_fields = ('s3_key', 'sftp_path') diff --git a/airflow/contrib/operators/sftp_to_s3_operator.py b/airflow/contrib/operators/sftp_to_s3_operator.py index 0c94541b74771..cefc838cf12d1 100644 --- a/airflow/contrib/operators/sftp_to_s3_operator.py +++ b/airflow/contrib/operators/sftp_to_s3_operator.py @@ -27,22 +27,24 @@ class SFTPToS3Operator(BaseOperator): """ - This operator enables the transferring of files from a SFTP server to Amazon S3 - :param sftp_conn_id: The sftp connection id. The name or identifier for - establishing a connection to the SFTP server. - :type sftp_conn_id: string - :param sftp_path: The sftp remote path. This is the specified file - path for downloading the file from the SFTP server. - :type sftp_path: string - :param s3_conn_id: The s3 connection id. The name or identifier for - establishing a connection to S3 - :type s3_conn_id: string - :param s3_bucket: The targeted s3 bucket. This is the S3 bucket - to where the file is uploaded. - :type s3_bucket: string - :param s3_key: The targeted s3 key. This is the specified path - for uploading the file to S3. - :type s3_key: string + This operator enables the transferring of files from a SFTP server to + Amazon S3. 
+ + :param sftp_conn_id: The sftp connection id. The name or identifier for + establishing a connection to the SFTP server. + :type sftp_conn_id: string + :param sftp_path: The sftp remote path. This is the specified file path + for downloading the file from the SFTP server. + :type sftp_path: string + :param s3_conn_id: The s3 connection id. The name or identifier for + establishing a connection to S3 + :type s3_conn_id: string + :param s3_bucket: The targeted s3 bucket. This is the S3 bucket to where + the file is uploaded. + :type s3_bucket: string + :param s3_key: The targeted s3 key. This is the specified path for + uploading the file to S3. + :type s3_key: string """ template_fields = ('s3_key', 'sftp_path') diff --git a/docs/code.rst b/docs/code.rst index 504b2970ef688..3aa926930062d 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -69,15 +69,15 @@ Operators .. autoclass:: airflow.operators.python_operator.BranchPythonOperator .. autoclass:: airflow.operators.check_operator.CheckOperator .. autoclass:: airflow.operators.docker_operator.DockerOperator -.. autoclass:: airflow.operators.dummy_operator.DummyOperator .. autoclass:: airflow.operators.druid_check_operator.DruidCheckOperator +.. autoclass:: airflow.operators.dummy_operator.DummyOperator .. autoclass:: airflow.operators.email_operator.EmailOperator .. autoclass:: airflow.operators.generic_transfer.GenericTransfer -.. autoclass:: airflow.operators.hive_to_druid.HiveToDruidTransfer -.. autoclass:: airflow.operators.hive_to_mysql.HiveToMySqlTransfer .. autoclass:: airflow.operators.hive_to_samba_operator.Hive2SambaOperator .. autoclass:: airflow.operators.hive_operator.HiveOperator .. autoclass:: airflow.operators.hive_stats_operator.HiveStatsCollectionOperator +.. autoclass:: airflow.operators.hive_to_druid.HiveToDruidTransfer +.. autoclass:: airflow.operators.hive_to_mysql.HiveToMySqlTransfer .. autoclass:: airflow.operators.check_operator.IntervalCheckOperator .. 
autoclass:: airflow.operators.jdbc_operator.JdbcOperator .. autoclass:: airflow.operators.latest_only_operator.LatestOnlyOperator @@ -94,6 +94,7 @@ Operators .. autoclass:: airflow.operators.presto_check_operator.PrestoValueCheckOperator .. autoclass:: airflow.operators.python_operator.PythonOperator .. autoclass:: airflow.operators.python_operator.PythonVirtualenvOperator +.. autoclass:: airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer .. autoclass:: airflow.operators.s3_file_transform_operator.S3FileTransformOperator .. autoclass:: airflow.operators.s3_to_hive_operator.S3ToHiveTransfer .. autoclass:: airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer @@ -105,7 +106,6 @@ Operators .. autoclass:: airflow.operators.subdag_operator.SubDagOperator .. autoclass:: airflow.operators.dagrun_operator.TriggerDagRunOperator .. autoclass:: airflow.operators.check_operator.ValueCheckOperator -.. autoclass:: airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer Sensors ^^^^^^^ @@ -118,8 +118,8 @@ Sensors .. autoclass:: airflow.sensors.s3_key_sensor.S3KeySensor .. autoclass:: airflow.sensors.s3_prefix_sensor.S3PrefixSensor .. autoclass:: airflow.sensors.sql_sensor.SqlSensor -.. autoclass:: airflow.sensors.time_sensor.TimeSensor .. autoclass:: airflow.sensors.time_delta_sensor.TimeDeltaSensor +.. autoclass:: airflow.sensors.time_sensor.TimeSensor .. autoclass:: airflow.sensors.web_hdfs_sensor.WebHdfsSensor Community-contributed Operators @@ -129,37 +129,61 @@ Operators ^^^^^^^^^ .. Alphabetize this list -.. autoclass:: airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator -.. autoclass:: airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator .. autoclass:: airflow.contrib.operators.awsbatch_operator.AWSBatchOperator +.. autoclass:: airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator +.. 
autoclass:: airflow.contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator .. autoclass:: airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator +.. autoclass:: airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator -.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator -.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator -.. autoclass:: airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator +.. autoclass:: airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator +.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator .. autoclass:: airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator .. autoclass:: airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator .. autoclass:: airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator +.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator +.. 
autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor .. autoclass:: airflow.contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator -.. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlBaseOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator +.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator .. 
autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator .. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator -.. autoclass:: airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHiveOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPigOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator +.. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksRunNowOperator +.. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator +.. autoclass:: airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator +.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateBaseOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator .. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator @@ -173,43 +197,22 @@ Operators .. autoclass:: airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator .. 
autoclass:: airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceBaseOperator .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator -.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator -.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator -.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator .. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator .. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator -.. 
autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlBaseOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator .. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator -.. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator +.. autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator .. autoclass:: airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator -.. 
autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator +.. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator .. autoclass:: airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator .. autoclass:: airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.gcs_to_gcs_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator @@ -236,13 +239,17 @@ Operators .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicCreateOperator .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicDeleteOperator .. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator -.. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator .. autoclass:: airflow.contrib.operators.qubole_operator.QuboleOperator +.. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator .. autoclass:: airflow.contrib.operators.s3_copy_object_operator.S3CopyObjectOperator .. autoclass:: airflow.contrib.operators.s3_delete_objects_operator.S3DeleteObjectsOperator .. autoclass:: airflow.contrib.operators.s3_list_operator.S3ListOperator .. autoclass:: airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator .. autoclass:: airflow.contrib.operators.s3_to_gcs_transfer_operator.S3ToGoogleCloudStorageTransferOperator +.. autoclass:: airflow.contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator +.. autoclass:: airflow.contrib.operators.sftp_operator.SFTPOperator +.. autoclass:: airflow.contrib.operators.sftp_to_s3_operator.SFTPToS3Operator +.. autoclass:: airflow.contrib.operators.ssh_operator.SSHOperator .. autoclass:: airflow.contrib.operators.sagemaker_base_operator.SageMakerBaseOperator .. autoclass:: airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator .. 
autoclass:: airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator @@ -251,7 +258,6 @@ Operators .. autoclass:: airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator .. autoclass:: airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator .. autoclass:: airflow.contrib.operators.segment_track_event_operator.SegmentTrackEventOperator -.. autoclass:: airflow.contrib.operators.sftp_operator.SFTPOperator .. autoclass:: airflow.contrib.operators.slack_webhook_operator.SlackWebhookOperator .. autoclass:: airflow.contrib.operators.snowflake_operator.SnowflakeOperator .. autoclass:: airflow.contrib.operators.sns_publish_operator.SnsPublishOperator @@ -259,9 +265,10 @@ Operators .. autoclass:: airflow.contrib.operators.spark_sql_operator.SparkSqlOperator .. autoclass:: airflow.contrib.operators.spark_submit_operator.SparkSubmitOperator .. autoclass:: airflow.contrib.operators.sqoop_operator.SqoopOperator -.. autoclass:: airflow.contrib.operators.ssh_operator.SSHOperator .. autoclass:: airflow.contrib.operators.vertica_operator.VerticaOperator .. autoclass:: airflow.contrib.operators.vertica_to_hive.VerticaToHiveTransfer +.. autoclass:: airflow.contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer +.. autoclass:: airflow.contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator .. autoclass:: airflow.contrib.operators.winrm_operator.WinRMOperator Sensors @@ -277,12 +284,13 @@ Sensors .. autoclass:: airflow.contrib.sensors.cassandra_table_sensor.CassandraTableSensor .. autoclass:: airflow.contrib.sensors.celery_queue_sensor.CeleryQueueSensor .. autoclass:: airflow.contrib.sensors.datadog_sensor.DatadogSensor +.. autoclass:: airflow.contrib.sensors.weekday_sensor.DayOfWeekSensor .. autoclass:: airflow.contrib.sensors.emr_base_sensor.EmrBaseSensor .. autoclass:: airflow.contrib.sensors.emr_job_flow_sensor.EmrJobFlowSensor .. autoclass:: airflow.contrib.sensors.emr_step_sensor.EmrStepSensor -.. 
autoclass:: airflow.contrib.sensors.file_sensor.FileSensor -.. autoclass:: airflow.contrib.sensors.ftp_sensor.FTPSensor .. autoclass:: airflow.contrib.sensors.ftp_sensor.FTPSSensor +.. autoclass:: airflow.contrib.sensors.ftp_sensor.FTPSensor +.. autoclass:: airflow.contrib.sensors.file_sensor.FileSensor .. autoclass:: airflow.contrib.sensors.gcs_sensor.GoogleCloudStorageObjectSensor .. autoclass:: airflow.contrib.sensors.gcs_sensor.GoogleCloudStorageObjectUpdatedSensor .. autoclass:: airflow.contrib.sensors.gcs_sensor.GoogleCloudStoragePrefixSensor @@ -290,18 +298,22 @@ Sensors .. autoclass:: airflow.contrib.sensors.hdfs_sensor.HdfsSensorRegex .. autoclass:: airflow.contrib.sensors.imap_attachment_sensor.ImapAttachmentSensor .. autoclass:: airflow.contrib.sensors.jira_sensor.JiraSensor +.. autoclass:: airflow.contrib.sensors.jira_sensor.JiraTicketSensor +.. autoclass:: airflow.contrib.sensors.mongo_sensor.MongoSensor .. autoclass:: airflow.contrib.sensors.pubsub_sensor.PubSubPullSensor .. autoclass:: airflow.contrib.sensors.python_sensor.PythonSensor +.. autoclass:: airflow.contrib.sensors.qubole_sensor.QuboleFileSensor +.. autoclass:: airflow.contrib.sensors.qubole_sensor.QubolePartitionSensor .. autoclass:: airflow.contrib.sensors.qubole_sensor.QuboleSensor .. autoclass:: airflow.contrib.sensors.redis_key_sensor.RedisKeySensor +.. autoclass:: airflow.contrib.sensors.sftp_sensor.SFTPSensor .. autoclass:: airflow.contrib.sensors.sagemaker_base_sensor.SageMakerBaseSensor .. autoclass:: airflow.contrib.sensors.sagemaker_endpoint_sensor.SageMakerEndpointSensor .. autoclass:: airflow.contrib.sensors.sagemaker_training_sensor.SageMakerTrainingSensor .. autoclass:: airflow.contrib.sensors.sagemaker_transform_sensor.SageMakerTransformSensor .. autoclass:: airflow.contrib.sensors.sagemaker_tuning_sensor.SageMakerTuningSensor -.. autoclass:: airflow.contrib.sensors.sftp_sensor.SFTPSensor .. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbBlobSensor -.. 
autoclass:: airflow.contrib.sensors.weekday_sensor.DayOfWeekSensor +.. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor .. _macros: @@ -415,7 +427,7 @@ persisted in the database. .. automodule:: airflow.models :show-inheritance: - :members: DAG, BaseOperator, TaskInstance, DagBag, Connection + :members: DAG, TaskInstance, DagBag, Connection, Variable, Pool, KubeResourceVersion, KubeWorkerIdentifier Hooks ----- @@ -423,17 +435,16 @@ Hooks Hooks are interfaces to external platforms and databases, implementing a common interface when possible and acting as building blocks for operators. +.. autoclass:: airflow.hooks.base_hook.BaseHook .. autoclass:: airflow.hooks.dbapi_hook.DbApiHook .. autoclass:: airflow.hooks.docker_hook.DockerHook -.. automodule:: airflow.hooks.hive_hooks - :members: - HiveCliHook, - HiveMetastoreHook, - HiveServer2Hook -.. autoclass:: airflow.hooks.http_hook.HttpHook .. autoclass:: airflow.hooks.druid_hook.DruidDbApiHook .. autoclass:: airflow.hooks.druid_hook.DruidHook .. autoclass:: airflow.hooks.hdfs_hook.HDFSHook +.. autoclass:: airflow.hooks.hive_hooks.HiveCliHook +.. autoclass:: airflow.hooks.hive_hooks.HiveMetastoreHook +.. autoclass:: airflow.hooks.hive_hooks.HiveServer2Hook +.. autoclass:: airflow.hooks.http_hook.HttpHook .. autoclass:: airflow.hooks.jdbc_hook.JdbcHook .. autoclass:: airflow.hooks.mssql_hook.MsSqlHook .. autoclass:: airflow.hooks.mysql_hook.MySqlHook @@ -458,12 +469,21 @@ Community contributed hooks .. autoclass:: airflow.contrib.hooks.aws_hook.AwsHook .. autoclass:: airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook .. autoclass:: airflow.contrib.hooks.aws_sns_hook.AwsSnsHook +.. autoclass:: airflow.contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook +.. autoclass:: airflow.contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook +.. autoclass:: airflow.contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook .. 
autoclass:: airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook .. autoclass:: airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook .. autoclass:: airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook .. autoclass:: airflow.contrib.hooks.bigquery_hook.BigQueryHook +.. autoclass:: airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook .. autoclass:: airflow.contrib.hooks.cassandra_hook.CassandraHook +.. autoclass:: airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook +.. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook +.. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook .. autoclass:: airflow.contrib.hooks.cloudant_hook.CloudantHook +.. autoclass:: airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook +.. autoclass:: airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook .. autoclass:: airflow.contrib.hooks.databricks_hook.DatabricksHook .. autoclass:: airflow.contrib.hooks.datadog_hook.DatadogHook .. autoclass:: airflow.contrib.hooks.datastore_hook.DatastoreHook @@ -472,34 +492,36 @@ Community contributed hooks .. autoclass:: airflow.contrib.hooks.fs_hook.FSHook .. autoclass:: airflow.contrib.hooks.ftp_hook.FTPHook .. autoclass:: airflow.contrib.hooks.ftp_hook.FTPSHook -.. autoclass:: airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook +.. autoclass:: airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook .. autoclass:: airflow.contrib.hooks.gcp_container_hook.GKEClusterHook -.. autoclass:: airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook -.. autoclass:: airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook -.. autoclass:: airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook -.. autoclass:: airflow.contrib.hooks.gcp_pubsub_hook.PubSubHook +.. autoclass:: airflow.contrib.hooks.gcp_compute_hook.GceHook +.. autoclass:: airflow.contrib.hooks.gcp_function_hook.GcfHook +.. autoclass:: airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook +.. 
autoclass:: airflow.contrib.hooks.gcp_kms_hook.GoogleCloudKMSHook .. autoclass:: airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook -.. autoclass:: airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook .. autoclass:: airflow.contrib.hooks.imap_hook.ImapHook .. autoclass:: airflow.contrib.hooks.jenkins_hook.JenkinsHook .. autoclass:: airflow.contrib.hooks.jira_hook.JiraHook +.. autoclass:: airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook .. autoclass:: airflow.contrib.hooks.mongo_hook.MongoHook .. autoclass:: airflow.contrib.hooks.openfaas_hook.OpenFaasHook .. autoclass:: airflow.contrib.hooks.pinot_hook.PinotDbApiHook +.. autoclass:: airflow.contrib.hooks.gcp_pubsub_hook.PubSubHook +.. autoclass:: airflow.contrib.hooks.qubole_check_hook.QuboleCheckHook .. autoclass:: airflow.contrib.hooks.qubole_hook.QuboleHook .. autoclass:: airflow.contrib.hooks.redis_hook.RedisHook .. autoclass:: airflow.contrib.hooks.redshift_hook.RedshiftHook +.. autoclass:: airflow.contrib.hooks.sftp_hook.SFTPHook +.. autoclass:: airflow.contrib.hooks.ssh_hook.SSHHook .. autoclass:: airflow.contrib.hooks.sagemaker_hook.SageMakerHook .. autoclass:: airflow.contrib.hooks.salesforce_hook.SalesforceHook .. autoclass:: airflow.contrib.hooks.segment_hook.SegmentHook -.. autoclass:: airflow.contrib.hooks.sftp_hook.SFTPHook .. autoclass:: airflow.contrib.hooks.slack_webhook_hook.SlackWebhookHook .. autoclass:: airflow.contrib.hooks.snowflake_hook.SnowflakeHook .. autoclass:: airflow.contrib.hooks.spark_jdbc_hook.SparkJDBCHook .. autoclass:: airflow.contrib.hooks.spark_sql_hook.SparkSqlHook .. autoclass:: airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook .. autoclass:: airflow.contrib.hooks.sqoop_hook.SqoopHook -.. autoclass:: airflow.contrib.hooks.ssh_hook.SSHHook .. autoclass:: airflow.contrib.hooks.vertica_hook.VerticaHook .. autoclass:: airflow.contrib.hooks.wasb_hook.WasbHook .. 
autoclass:: airflow.contrib.hooks.winrm_hook.WinRMHook @@ -508,8 +530,8 @@ Executors --------- Executors are the mechanism by which task instances get run. -.. autoclass:: airflow.executors.local_executor.LocalExecutor .. autoclass:: airflow.executors.celery_executor.CeleryExecutor +.. autoclass:: airflow.executors.local_executor.LocalExecutor .. autoclass:: airflow.executors.sequential_executor.SequentialExecutor Community-contributed executors diff --git a/docs/integration.rst b/docs/integration.rst index 797286cbe987d..9ac8e673082a0 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -1014,6 +1014,7 @@ Cloud Bigtable Hook .. autoclass:: airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook :members: + :noindex: Compute Engine '''''''''''''' From 6cfadcde7aea21be4b5e7258b3a600cbd04e9529 Mon Sep 17 00:00:00 2001 From: Ryo Okubo Date: Wed, 6 Feb 2019 14:54:43 +0900 Subject: [PATCH 0047/1104] [AIRFLOW-3814] Add exception details to warning log (#4651) * Add exception details to warning log * Fix log format --- airflow/settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airflow/settings.py b/airflow/settings.py index 035b0ff0e80fb..59767085a2611 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -80,8 +80,8 @@ def timing(cls, stat, dt): port=conf.getint('scheduler', 'statsd_port'), prefix=conf.get('scheduler', 'statsd_prefix')) Stats = statsd -except (socket.gaierror, ImportError): - log.warning("Could not configure StatsClient, using DummyStatsLogger instead.") +except (socket.gaierror, ImportError) as e: + log.warning("Could not configure StatsClient: %s, using DummyStatsLogger instead.", e) HEADER = '\n'.join([ r' ____________ _____________', From e1d3df19992c76aff47fa18b9afca8ebf691e1da Mon Sep 17 00:00:00 2001 From: BasPH Date: Wed, 6 Feb 2019 09:55:16 +0100 Subject: [PATCH 0048/1104] [AIRFLOW-3476,3477] Move Kube classes out of models.py (#4443) --- .../contrib/executors/kubernetes_executor.py | 3 +- 
airflow/models/__init__.py | 58 +------------- airflow/models/kubernetes.py | 79 +++++++++++++++++++ tests/models.py | 34 -------- tests/models/test_kubernetes.py | 58 ++++++++++++++ 5 files changed, 141 insertions(+), 91 deletions(-) create mode 100644 airflow/models/kubernetes.py create mode 100644 tests/models/test_kubernetes.py diff --git a/airflow/contrib/executors/kubernetes_executor.py b/airflow/contrib/executors/kubernetes_executor.py index 08d9dfb4cbe0f..057bec8b76fe6 100644 --- a/airflow/contrib/executors/kubernetes_executor.py +++ b/airflow/contrib/executors/kubernetes_executor.py @@ -30,7 +30,8 @@ from airflow.contrib.kubernetes.worker_configuration import WorkerConfiguration from airflow.executors.base_executor import BaseExecutor from airflow.executors import Executors -from airflow.models import TaskInstance, KubeResourceVersion, KubeWorkerIdentifier +from airflow.models import TaskInstance +from airflow.models.kubernetes import KubeResourceVersion, KubeWorkerIdentifier from airflow.utils.state import State from airflow.utils.db import provide_session, create_session from airflow import configuration diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 7ce38eb1d2094..f82bbbf0a6099 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -58,13 +58,12 @@ import warnings import hashlib -import uuid from datetime import datetime from urllib.parse import quote from sqlalchemy import ( Boolean, Column, DateTime, Float, Index, Integer, PickleType, String, - Text, UniqueConstraint, and_, func, or_, true as sqltrue + Text, UniqueConstraint, and_, func, or_ ) from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import reconstructor, synonym @@ -84,6 +83,7 @@ from airflow.dag.base_dag import BaseDag, BaseDagBag from airflow.lineage import apply_lineage, prepare_lineage from airflow.models.dagpickle import DagPickle +from airflow.models.kubernetes import KubeWorkerIdentifier, KubeResourceVersion # 
noqa: F401 from airflow.models.log import Log from airflow.models.taskfail import TaskFail from airflow.models.taskreschedule import TaskReschedule @@ -4748,57 +4748,3 @@ def open_slots(self, session): used_slots = self.used_slots(session=session) queued_slots = self.queued_slots(session=session) return self.slots - used_slots - queued_slots - - -class KubeResourceVersion(Base): - __tablename__ = "kube_resource_version" - one_row_id = Column(Boolean, server_default=sqltrue(), primary_key=True) - resource_version = Column(String(255)) - - @staticmethod - @provide_session - def get_current_resource_version(session=None): - (resource_version,) = session.query(KubeResourceVersion.resource_version).one() - return resource_version - - @staticmethod - @provide_session - def checkpoint_resource_version(resource_version, session=None): - if resource_version: - session.query(KubeResourceVersion).update({ - KubeResourceVersion.resource_version: resource_version - }) - session.commit() - - @staticmethod - @provide_session - def reset_resource_version(session=None): - session.query(KubeResourceVersion).update({ - KubeResourceVersion.resource_version: '0' - }) - session.commit() - return '0' - - -class KubeWorkerIdentifier(Base): - __tablename__ = "kube_worker_uuid" - one_row_id = Column(Boolean, server_default=sqltrue(), primary_key=True) - worker_uuid = Column(String(255)) - - @staticmethod - @provide_session - def get_or_create_current_kube_worker_uuid(session=None): - (worker_uuid,) = session.query(KubeWorkerIdentifier.worker_uuid).one() - if worker_uuid == '': - worker_uuid = str(uuid.uuid4()) - KubeWorkerIdentifier.checkpoint_kube_worker_uuid(worker_uuid, session) - return worker_uuid - - @staticmethod - @provide_session - def checkpoint_kube_worker_uuid(worker_uuid, session=None): - if worker_uuid: - session.query(KubeWorkerIdentifier).update({ - KubeWorkerIdentifier.worker_uuid: worker_uuid - }) - session.commit() diff --git a/airflow/models/kubernetes.py 
b/airflow/models/kubernetes.py new file mode 100644 index 0000000000000..a18689eefd316 --- /dev/null +++ b/airflow/models/kubernetes.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import uuid + +from sqlalchemy import Column, Boolean, true as sqltrue, String + +from airflow.models.base import Base +from airflow.utils.db import provide_session + + +class KubeResourceVersion(Base): + __tablename__ = "kube_resource_version" + one_row_id = Column(Boolean, server_default=sqltrue(), primary_key=True) + resource_version = Column(String(255)) + + @staticmethod + @provide_session + def get_current_resource_version(session=None): + (resource_version,) = session.query(KubeResourceVersion.resource_version).one() + return resource_version + + @staticmethod + @provide_session + def checkpoint_resource_version(resource_version, session=None): + if resource_version: + session.query(KubeResourceVersion).update({ + KubeResourceVersion.resource_version: resource_version + }) + session.commit() + + @staticmethod + @provide_session + def reset_resource_version(session=None): + session.query(KubeResourceVersion).update({ + KubeResourceVersion.resource_version: '0' + }) + session.commit() + return '0' + 
+ +class KubeWorkerIdentifier(Base): + __tablename__ = "kube_worker_uuid" + one_row_id = Column(Boolean, server_default=sqltrue(), primary_key=True) + worker_uuid = Column(String(255)) + + @staticmethod + @provide_session + def get_or_create_current_kube_worker_uuid(session=None): + (worker_uuid,) = session.query(KubeWorkerIdentifier.worker_uuid).one() + if worker_uuid == '': + worker_uuid = str(uuid.uuid4()) + KubeWorkerIdentifier.checkpoint_kube_worker_uuid(worker_uuid, session) + return worker_uuid + + @staticmethod + @provide_session + def checkpoint_kube_worker_uuid(worker_uuid, session=None): + if worker_uuid: + session.query(KubeWorkerIdentifier).update({ + KubeWorkerIdentifier.worker_uuid: worker_uuid + }) + session.commit() diff --git a/tests/models.py b/tests/models.py index 7b9a5d91d4068..0a013930c9bf3 100644 --- a/tests/models.py +++ b/tests/models.py @@ -47,7 +47,6 @@ from airflow.jobs import BackfillJob from airflow.models import DAG, TaskInstance as TI from airflow.models import DagModel, DagRun -from airflow.models import KubeResourceVersion, KubeWorkerIdentifier from airflow.models import SkipMixin from airflow.models import State as ST from airflow.models import Variable @@ -3455,36 +3454,3 @@ def test_skip_none_tasks(self): SkipMixin().skip(dag_run=None, execution_date=None, tasks=[], session=session) self.assertFalse(session.query.called) self.assertFalse(session.commit.called) - - -class TestKubeResourceVersion(unittest.TestCase): - - def test_checkpoint_resource_version(self): - session = settings.Session() - KubeResourceVersion.checkpoint_resource_version('7', session) - self.assertEqual(KubeResourceVersion.get_current_resource_version(session), '7') - - def test_reset_resource_version(self): - session = settings.Session() - version = KubeResourceVersion.reset_resource_version(session) - self.assertEqual(version, '0') - self.assertEqual(KubeResourceVersion.get_current_resource_version(session), '0') - - -class 
TestKubeWorkerIdentifier(unittest.TestCase): - - @patch('airflow.models.uuid.uuid4') - def test_get_or_create_not_exist(self, mock_uuid): - session = settings.Session() - session.query(KubeWorkerIdentifier).update({ - KubeWorkerIdentifier.worker_uuid: '' - }) - mock_uuid.return_value = 'abcde' - worker_uuid = KubeWorkerIdentifier.get_or_create_current_kube_worker_uuid(session) - self.assertEqual(worker_uuid, 'abcde') - - def test_get_or_create_exist(self): - session = settings.Session() - KubeWorkerIdentifier.checkpoint_kube_worker_uuid('fghij', session) - worker_uuid = KubeWorkerIdentifier.get_or_create_current_kube_worker_uuid(session) - self.assertEqual(worker_uuid, 'fghij') diff --git a/tests/models/test_kubernetes.py b/tests/models/test_kubernetes.py new file mode 100644 index 0000000000000..45bea723277ec --- /dev/null +++ b/tests/models/test_kubernetes.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest + +from mock import patch + +from airflow import settings +from airflow.models.kubernetes import KubeResourceVersion, KubeWorkerIdentifier + + +class TestKubeResourceVersion(unittest.TestCase): + + def test_checkpoint_resource_version(self): + session = settings.Session() + KubeResourceVersion.checkpoint_resource_version('7', session) + self.assertEqual(KubeResourceVersion.get_current_resource_version(session), '7') + + def test_reset_resource_version(self): + session = settings.Session() + version = KubeResourceVersion.reset_resource_version(session) + self.assertEqual(version, '0') + self.assertEqual(KubeResourceVersion.get_current_resource_version(session), '0') + + +class TestKubeWorkerIdentifier(unittest.TestCase): + + @patch('airflow.models.uuid.uuid4') + def test_get_or_create_not_exist(self, mock_uuid): + session = settings.Session() + session.query(KubeWorkerIdentifier).update({ + KubeWorkerIdentifier.worker_uuid: '' + }) + mock_uuid.return_value = 'abcde' + worker_uuid = KubeWorkerIdentifier.get_or_create_current_kube_worker_uuid(session) + self.assertEqual(worker_uuid, 'abcde') + + def test_get_or_create_exist(self): + session = settings.Session() + KubeWorkerIdentifier.checkpoint_kube_worker_uuid('fghij', session) + worker_uuid = KubeWorkerIdentifier.get_or_create_current_kube_worker_uuid(session) + self.assertEqual(worker_uuid, 'fghij') From 2e19e1842a20340cf7956ff8ffd52a575fff32be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Wed, 6 Feb 2019 15:35:12 +0100 Subject: [PATCH 0049/1104] [AIRFLOW-3810] Remove duplicate autoclass directive (#4656) --- docs/integration.rst | 1387 +++++++++--------------------------------- 1 file changed, 271 insertions(+), 1116 deletions(-) diff --git a/docs/integration.rst b/docs/integration.rst index 9ac8e673082a0..cacb44201d647 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -1,4 +1,3 @@ - .. 
Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information @@ -111,50 +110,21 @@ Airflow connection of type `wasb` exists. Authorization can be done by supplying login (=Storage account name) and password (=KEY), or login and SAS token in the extra field (see connection `wasb_default` for an example). -- :ref:`WasbBlobSensor`: Checks if a blob is present on Azure Blob storage. -- :ref:`WasbPrefixSensor`: Checks if blobs matching a prefix are present on Azure Blob storage. -- :ref:`FileToWasbOperator`: Uploads a local file to a container as a blob. -- :ref:`WasbHook`: Interface with Azure Blob Storage. - -.. _WasbBlobSensor: - -WasbBlobSensor -"""""""""""""" - -.. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbBlobSensor - :noindex: - -.. _WasbDeleteBlobOperator: - -WasbDeleteBlobOperator -"""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator - :noindex: - -.. _WasbPrefixSensor: - -WasbPrefixSensor -"""""""""""""""" +:class:`airflow.contrib.hooks.wasb_hook.WasbHook` + Interface with Azure Blob Storage. -.. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor - :noindex: +:class:`airflow.contrib.sensors.wasb_sensor.WasbBlobSensor` + Checks if a blob is present on Azure Blob storage. -.. _FileToWasbOperator: +:class:`airflow.contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator` + Deletes blob(s) on Azure Blob Storage. -FileToWasbOperator -"""""""""""""""""" +:class:`airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor` + Checks if blobs matching a prefix are present on Azure Blob storage. -.. autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator - :noindex: +:class:`airflow.contrib.operators.file_to_wasb.FileToWasbOperator` + Uploads a local file to a container as a blob. -.. _WasbHook: - -WasbHook -"""""""" - -.. 
autoclass:: airflow.contrib.hooks.wasb_hook.WasbHook - :noindex: Azure File Share '''''''''''''''' @@ -164,11 +134,8 @@ type `wasb` exists. Authorization can be done by supplying a login (=Storage acc and password (=Storage account key), or login and SAS token in the extra field (see connection `wasb_default` for an example). -AzureFileShareHook -"""""""""""""""""" - -.. autoclass:: airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook - :noindex: +:class:`airflow.contrib.hooks.azure_fileshare_hook.AzureFileShareHook`: + Interface with Azure File Share. Logging ''''''' @@ -184,33 +151,15 @@ Airflow connection of type `azure_cosmos` exists. Authorization can be done by s login (=Endpoint uri), password (=secret key) and extra fields database_name and collection_name to specify the default database and collection to use (see connection `azure_cosmos_default` for an example). -- :ref:`AzureCosmosDBHook`: Interface with Azure CosmosDB. -- :ref:`AzureCosmosInsertDocumentOperator`: Simple operator to insert document into CosmosDB. -- :ref:`AzureCosmosDocumentSensor`: Simple sensor to detect document existence in CosmosDB. - -.. _AzureCosmosDBHook: - -AzureCosmosDBHook -""""""""""""""""" - -.. autoclass:: airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook - :noindex: - -.. _AzureCosmosInsertDocumentOperator: - -AzureCosmosInsertDocumentOperator -""""""""""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator - :noindex: +:class:`airflow.contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook` + Interface with Azure CosmosDB. -.. _AzureCosmosDocumentSensor: +:class:`airflow.contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator` + Simple operator to insert document into CosmosDB. -AzureCosmosDocumentSensor -""""""""""""""""""""""""" +:class:`airflow.contrib.sensors.azure_cosmos_sensor.AzureCosmosDocumentSensor` + Simple sensor to detect document existence in CosmosDB. -.. 
autoclass:: airflow.contrib.sensors.azure_cosmos_sensor.AzureCosmosDocumentSensor - :noindex: Azure Data Lake ''''''''''''''' @@ -220,81 +169,39 @@ Airflow connection of type `azure_data_lake` exists. Authorization can be done b login (=Client ID), password (=Client Secret) and extra fields tenant (Tenant) and account_name (Account Name) (see connection `azure_data_lake_default` for an example). -- :ref:`AzureDataLakeHook`: Interface with Azure Data Lake. -- :ref:`AzureDataLakeStorageListOperator`: Lists the files located in a specified Azure Data Lake path. -- :ref:`AdlsToGoogleCloudStorageOperator`: Copies files from an Azure Data Lake path to a Google Cloud Storage bucket. +:class:`airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook` + Interface with Azure Data Lake. -.. _AzureDataLakeHook: +:class:`airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator` + Lists the files located in a specified Azure Data Lake path. -AzureDataLakeHook -""""""""""""""""" +:class:`airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator` + Copies files from an Azure Data Lake path to a Google Cloud Storage bucket. -.. autoclass:: airflow.contrib.hooks.azure_data_lake_hook.AzureDataLakeHook - :noindex: - -.. _AzureDataLakeStorageListOperator: - -AzureDataLakeStorageListOperator -"""""""""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator - :noindex: - -.. _AdlsToGoogleCloudStorageOperator: - -AdlsToGoogleCloudStorageOperator -"""""""""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator - :noindex: Azure Container Instances ''''''''''''''''''''''''' Azure Container Instances provides a method to run a docker container without having to worry about managing infrastructure. The AzureContainerInstanceHook requires a service principal. 
The -credentials for this principal can either be defined in the extra field `key_path`, as an -environment variable named `AZURE_AUTH_LOCATION`, +credentials for this principal can either be defined in the extra field ``key_path``, as an +environment variable named ``AZURE_AUTH_LOCATION``, or by providing a login/password and tenantId in extras. The AzureContainerRegistryHook requires a host/login/password to be defined in the connection. -- :ref:`AzureContainerInstancesOperator` : Start/Monitor a new ACI. -- :ref:`AzureContainerInstanceHook` : Wrapper around a single ACI. -- :ref:`AzureContainerRegistryHook` : Wrapper around a ACR -- :ref:`AzureContainerVolumeHook` : Wrapper around Container Volumes - -.. _AzureContainerInstancesOperator: - -AzureContainerInstancesOperator -""""""""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator - :noindex: +:class:`airflow.contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook` + Interface with Azure Container Volumes -.. _AzureContainerInstanceHook: +:class:`airflow.contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator` + Start/Monitor a new ACI. -AzureContainerInstanceHook -"""""""""""""""""""""""""" +:class:`airflow.contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook` + Wrapper around a single ACI. -.. autoclass:: airflow.contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook - :noindex: +:class:`airflow.contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook` + Interface with ACR -.. _AzureContainerRegistryHook: - -AzureContainerRegistryHook -"""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook - :noindex: - -.. _AzureContainerVolumeHook: - -AzureContainerVolumeHook -"""""""""""""""""""""""" - -.. 
autoclass:: airflow.contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook - :noindex: .. _AWS: @@ -308,218 +215,88 @@ Operators are in the contrib section. AWS EMR ''''''' -- :ref:`EmrAddStepsOperator` : Adds steps to an existing EMR JobFlow. -- :ref:`EmrCreateJobFlowOperator` : Creates an EMR JobFlow, reading the config from the EMR connection. -- :ref:`EmrTerminateJobFlowOperator` : Terminates an EMR JobFlow. -- :ref:`EmrHook` : Interact with AWS EMR. - -.. _EmrAddStepsOperator: - -EmrAddStepsOperator -""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.emr_add_steps_operator.EmrAddStepsOperator - :noindex: - -.. _EmrCreateJobFlowOperator: +:class:`airflow.contrib.hooks.emr_hook.EmrHook` + Interface with AWS EMR. -EmrCreateJobFlowOperator -"""""""""""""""""""""""" +:class:`airflow.contrib.operators.emr_add_steps_operator.EmrAddStepsOperator` + Adds steps to an existing EMR JobFlow. -.. autoclass:: airflow.contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator - :noindex: +:class:`airflow.contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator` + Creates an EMR JobFlow, reading the config from the EMR connection. -.. _EmrTerminateJobFlowOperator: - -EmrTerminateJobFlowOperator -""""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator - :noindex: - -.. _EmrHook: - -EmrHook -""""""" - -.. autoclass:: airflow.contrib.hooks.emr_hook.EmrHook - :noindex: +:class:`airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator` + Terminates an EMR JobFlow. AWS S3 '''''' -- :ref:`S3Hook` : Interact with AWS S3. -- :ref:`S3FileTransformOperator` : Copies data from a source S3 location to a temporary location on the local filesystem. -- :ref:`S3ListOperator` : Lists the files matching a key prefix from a S3 location. -- :ref:`S3ToGoogleCloudStorageOperator` : Syncs an S3 location with a Google Cloud Storage bucket. 
-- :ref:`S3ToGoogleCloudStorageTransferOperator` : Syncs an S3 bucket with a Google Cloud Storage bucket using the GCP Storage Transfer Service. -- :ref:`S3ToHiveTransfer` : Moves data from S3 to Hive. The operator downloads a file from S3, stores the file locally before loading it into a Hive table. - -.. _S3Hook: - -S3Hook -"""""" - -.. autoclass:: airflow.hooks.S3_hook.S3Hook - :noindex: - -.. _S3FileTransformOperator: - -S3FileTransformOperator -""""""""""""""""""""""" - -.. autoclass:: airflow.operators.s3_file_transform_operator.S3FileTransformOperator - :noindex: - -.. _S3ListOperator: - -S3ListOperator -"""""""""""""" - -.. autoclass:: airflow.contrib.operators.s3_list_operator.S3ListOperator - :noindex: - -.. _S3ToGoogleCloudStorageOperator: - -S3ToGoogleCloudStorageOperator -"""""""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator - :noindex: - -.. _S3ToGoogleCloudStorageTransferOperator: +:class:`airflow.hooks.S3_hook.S3Hook` + Interface with AWS S3. -S3ToGoogleCloudStorageTransferOperator -"""""""""""""""""""""""""""""""""""""" +:class:`airflow.operators.s3_file_transform_operator.S3FileTransformOperator` + Copies data from a source S3 location to a temporary location on the local filesystem. -.. autoclass:: airflow.contrib.operators.s3_to_gcs_transfer_operator.S3ToGoogleCloudStorageTransferOperator - :noindex: +:class:`airflow.contrib.operators.s3_list_operator.S3ListOperator` + Lists the files matching a key prefix from a S3 location. -.. _S3ToHiveTransfer: +:class:`airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator` + Syncs an S3 location with a Google Cloud Storage bucket. -S3ToHiveTransfer -"""""""""""""""" +:class:`airflow.contrib.operators.s3_to_gcs_transfer_operator.S3ToGoogleCloudStorageTransferOperator` + Syncs an S3 bucket with a Google Cloud Storage bucket using the GCP Storage Transfer Service. -.. 
autoclass:: airflow.operators.s3_to_hive_operator.S3ToHiveTransfer - :noindex: - - -AWS EC2 Container Service -''''''''''''''''''''''''' - -- :ref:`ECSOperator` : Execute a task on AWS EC2 Container Service. - -.. _ECSOperator: - -ECSOperator -""""""""""" - -.. autoclass:: airflow.contrib.operators.ecs_operator.ECSOperator - :noindex: +:class:`airflow.operators.s3_to_hive_operator.S3ToHiveTransfer` + Moves data from S3 to Hive. The operator downloads a file from S3, stores the file locally before loading it into a Hive table. AWS Batch Service ''''''''''''''''' -- :ref:`AWSBatchOperator` : Execute a task on AWS Batch Service. - -.. _AWSBatchOperator: - -AWSBatchOperator -"""""""""""""""" - -.. autoclass:: airflow.contrib.operators.awsbatch_operator.AWSBatchOperator - :noindex: +:class:`airflow.contrib.operators.awsbatch_operator.AWSBatchOperator` + Execute a task on AWS Batch Service. AWS RedShift '''''''''''' -- :ref:`AwsRedshiftClusterSensor` : Waits for a Redshift cluster to reach a specific status. -- :ref:`RedshiftHook` : Interact with AWS Redshift, using the boto3 library. -- :ref:`RedshiftToS3Transfer` : Executes an unload command to S3 as CSV with or without headers. -- :ref:`S3ToRedshiftTransfer` : Executes an copy command from S3 as CSV with or without headers. - -.. _AwsRedshiftClusterSensor: - -AwsRedshiftClusterSensor -"""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor - :noindex: - -.. _RedshiftHook: +:class:`airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor` + Waits for a Redshift cluster to reach a specific status. -RedshiftHook -"""""""""""" +:class:`airflow.contrib.hooks.redshift_hook.RedshiftHook` + Interact with AWS Redshift, using the boto3 library. -.. 
autoclass:: airflow.contrib.hooks.redshift_hook.RedshiftHook - :noindex: +:class:`airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer` + Executes an unload command to S3 as CSV with or without headers. -.. _RedshiftToS3Transfer: +:class:`airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer` + Executes an copy command from S3 as CSV with or without headers. -RedshiftToS3Transfer -"""""""""""""""""""" -.. autoclass:: airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer - :noindex: - -.. _S3ToRedshiftTransfer: - -S3ToRedshiftTransfer -"""""""""""""""""""" - -.. autoclass:: airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer - :noindex: AWS DynamoDB '''''''''''' -- :ref:`HiveToDynamoDBTransferOperator` : Moves data from Hive to DynamoDB. -- :ref:`AwsDynamoDBHook` : Interact with AWS DynamoDB. - -.. _HiveToDynamoDBTransferOperator: +:class:`airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator` + Moves data from Hive to DynamoDB. -HiveToDynamoDBTransferOperator -"""""""""""""""""""""""""""""" +:class:`airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook` + Interface with AWS DynamoDB. -.. autoclass:: airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator - :noindex: - -.. _AwsDynamoDBHook: - -AwsDynamoDBHook -""""""""""""""" - -.. autoclass:: airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook - :noindex: AWS Lambda '''''''''' -- :ref:`AwsLambdaHook` : Interact with AWS Lambda. - -.. _AwsLambdaHook: - -AwsLambdaHook -""""""""""""" +:class:`airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook` + Interface with AWS Lambda. -.. autoclass:: airflow.contrib.hooks.aws_lambda_hook.AwsLambdaHook - :noindex: AWS Kinesis ''''''''''' -- :ref:`AwsFirehoseHook` : Interact with AWS Kinesis Firehose. +:class:`airflow.contrib.hooks.aws_firehose_hook.AwsFirehoseHook` + Interface with AWS Kinesis Firehose. -.. _AwsFirehoseHook: - -AwsFirehoseHook -""""""""""""""" - -.. 
autoclass:: airflow.contrib.hooks.aws_firehose_hook.AwsFirehoseHook - :noindex: Amazon SageMaker '''''''''''''''' @@ -528,69 +305,27 @@ For more instructions on using Amazon SageMaker in Airflow, please see `the Sage .. _the SageMaker Python SDK README: https://github.com/aws/sagemaker-python-sdk/blob/master/src/sagemaker/workflow/README.rst -- :ref:`SageMakerHook` : Interact with Amazon SageMaker. -- :ref:`SageMakerTrainingOperator` : Create a SageMaker training job. -- :ref:`SageMakerTuningOperator` : Create a SageMaker tuning job. -- :ref:`SageMakerModelOperator` : Create a SageMaker model. -- :ref:`SageMakerTransformOperator` : Create a SageMaker transform job. -- :ref:`SageMakerEndpointConfigOperator` : Create a SageMaker endpoint config. -- :ref:`SageMakerEndpointOperator` : Create a SageMaker endpoint. - -.. _SageMakerHook: - -SageMakerHook -""""""""""""" - -.. autoclass:: airflow.contrib.hooks.sagemaker_hook.SageMakerHook - :noindex: - -.. _SageMakerTrainingOperator: - -SageMakerTrainingOperator -""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator - :noindex: - -.. _SageMakerTuningOperator: - -SageMakerTuningOperator -""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator - :noindex: - -.. _SageMakerModelOperator: - -SageMakerModelOperator -"""""""""""""""""""""" +:class:`airflow.contrib.hooks.sagemaker_hook.SageMakerHook` + Interface with Amazon SageMaker. -.. autoclass:: airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator - :noindex: +:class:`airflow.contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator` + Create a SageMaker training job. -.. _SageMakerTransformOperator: +:class:`airflow.contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator` + Create a SageMaker tuning job. 
-SageMakerTransformOperator -"""""""""""""""""""""""""" +:class:`airflow.contrib.operators.sagemaker_model_operator.SageMakerModelOperator` + Create a SageMaker model. -.. autoclass:: airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator - :noindex: +:class:`airflow.contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator` + Create a SageMaker transform job. -.. _SageMakerEndpointConfigOperator: +:class:`airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator` + Create a SageMaker endpoint config. -SageMakerEndpointConfigOperator -""""""""""""""""""""""""""""""" +:class:`airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator` + Create a SageMaker endpoint. -.. autoclass:: airflow.contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator - :noindex: - -.. _SageMakerEndpointOperator: - -SageMakerEndpointOperator -""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator - :noindex: .. _Databricks: @@ -601,11 +336,12 @@ Databricks submitting runs to the Databricks platform. Internally the operator talks to the ``api/2.0/jobs/runs/submit`` `endpoint `_. -DatabricksSubmitRunOperator -''''''''''''''''''''''''''' -.. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator - :noindex: +:class:`airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator` + Submits a Spark job run to Databricks using the + `api/2.0/jobs/runs/submit + `_ + API endpoint. .. _GCP: @@ -626,522 +362,197 @@ Logging Airflow can be configured to read and write task logs in Google Cloud Storage. See :ref:`write-logs-gcp`. + GoogleCloudBaseHook ''''''''''''''''''' -.. autoclass:: airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook - :noindex: +All hooks is based on :class:`airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook`. -.. 
_GoogleCloudBaseHook: BigQuery '''''''' -BigQuery Operators -"""""""""""""""""" - -- :ref:`BigQueryCheckOperator` : Performs checks against a SQL query that will return a single row with different values. -- :ref:`BigQueryValueCheckOperator` : Performs a simple value check using SQL code. -- :ref:`BigQueryIntervalCheckOperator` : Checks that the values of metrics given as SQL expressions are within a certain tolerance of the ones from days_back before. -- :ref:`BigQueryGetDataOperator` : Fetches the data from a BigQuery table and returns data in a python list -- :ref:`BigQueryCreateEmptyDatasetOperator` : Creates an empty BigQuery dataset. -- :ref:`BigQueryCreateEmptyTableOperator` : Creates a new, empty table in the specified BigQuery dataset optionally with schema. -- :ref:`BigQueryCreateExternalTableOperator` : Creates a new, external table in the dataset with the data in Google Cloud Storage. -- :ref:`BigQueryDeleteDatasetOperator` : Deletes an existing BigQuery dataset. -- :ref:`BigQueryTableDeleteOperator` : Deletes an existing BigQuery table. -- :ref:`BigQueryOperator` : Executes BigQuery SQL queries in a specific BigQuery database. -- :ref:`BigQueryToBigQueryOperator` : Copy a BigQuery table to another BigQuery table. -- :ref:`BigQueryToCloudStorageOperator` : Transfers a BigQuery table to a Google Cloud Storage bucket - - -.. _BigQueryCheckOperator: - -BigQueryCheckOperator -^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator - :noindex: - -.. _BigQueryValueCheckOperator: - -BigQueryValueCheckOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator - :noindex: - -.. _BigQueryIntervalCheckOperator: - -BigQueryIntervalCheckOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator - :noindex: - -.. 
_BigQueryGetDataOperator: - -BigQueryGetDataOperator -^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator - :noindex: +:class:`airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator` + Performs checks against a SQL query that will return a single row with different values. -.. _BigQueryCreateEmptyTableOperator: +:class:`airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator` + Checks that the values of metrics given as SQL expressions are within a certain tolerance of the ones from days_back before. -BigQueryCreateEmptyTableOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator` + Performs a simple value check using SQL code. -.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator - :noindex: +:class:`airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator` + Fetches the data from a BigQuery table and returns data in a python list -.. _BigQueryCreateExternalTableOperator: +:class:`airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator` + Creates an empty BigQuery dataset. -BigQueryCreateExternalTableOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator` + Creates a new, empty table in the specified BigQuery dataset optionally with schema. -.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator - :noindex: +:class:`airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator` + Creates a new, external table in the dataset with the data in Google Cloud Storage. -.. _BigQueryCreateEmptyDatasetOperator: +:class:`airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator` + Deletes an existing BigQuery dataset. 
-BigQueryCreateEmptyDatasetOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.bigquery_operator.BigQueryOperator` + Executes BigQuery SQL queries in a specific BigQuery database. -.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator - :noindex: +:class:`airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator` + Deletes an existing BigQuery table. -.. _BigQueryDeleteDatasetOperator: +:class:`airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator` + Copy a BigQuery table to another BigQuery table. -BigQueryDeleteDatasetOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator` + Transfers a BigQuery table to a Google Cloud Storage bucket -.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator - :noindex: -.. _BigQueryTableDeleteOperator: +They also use :class:`airflow.contrib.hooks.bigquery_hook.BigQueryHook` to communicate with Google Cloud Platform. -BigQueryTableDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator - :noindex: - -.. _BigQueryOperator: - -BigQueryOperator -^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator - :noindex: - -.. _BigQueryToBigQueryOperator: - -BigQueryToBigQueryOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator - :noindex: - -.. _BigQueryToCloudStorageOperator: - -BigQueryToCloudStorageOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator - :noindex: - - -BigQueryHook -"""""""""""" - -.. 
autoclass:: airflow.contrib.hooks.bigquery_hook.BigQueryHook - :members: - :noindex: Cloud Spanner ''''''''''''' -Cloud Spanner Operators -""""""""""""""""""""""" - -- :ref:`CloudSpannerInstanceDatabaseDeleteOperator` : deletes an existing database from - a Google Cloud Spanner instance or returns success if the database is missing. -- :ref:`CloudSpannerInstanceDatabaseDeployOperator` : creates a new database in a Google - Cloud instance or returns success if the database already exists. -- :ref:`CloudSpannerInstanceDatabaseUpdateOperator` : updates the structure of a - Google Cloud Spanner database. -- :ref:`CloudSpannerInstanceDatabaseQueryOperator` : executes an arbitrary DML query - (INSERT, UPDATE, DELETE). -- :ref:`CloudSpannerInstanceDeployOperator` : creates a new Google Cloud Spanner instance, - or if an instance with the same name exists, updates the instance. -- :ref:`CloudSpannerInstanceDeleteOperator` : deletes a Google Cloud Spanner instance. - -.. _CloudSpannerInstanceDatabaseDeleteOperator: - -CloudSpannerInstanceDatabaseDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator - :noindex: - -.. _CloudSpannerInstanceDatabaseDeployOperator: +:class:`airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator` + deletes an existing database from a Google Cloud Spanner instance or returns success if the database is missing. -CloudSpannerInstanceDatabaseDeployOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator` + creates a new database in a Google Cloud instance or returns success if the database already exists. -.. 
autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator - :noindex: +:class:`airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator` + executes an arbitrary DML query (INSERT, UPDATE, DELETE). -.. _CloudSpannerInstanceDatabaseUpdateOperator: +:class:`airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator` + updates the structure of a Google Cloud Spanner database. -CloudSpannerInstanceDatabaseUpdateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator` + deletes a Google Cloud Spanner instance. -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator - :noindex: +:class:`airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator` + creates a new Google Cloud Spanner instance, or if an instance with the same name exists, updates the instance. -.. _CloudSpannerInstanceDatabaseQueryOperator: -CloudSpannerInstanceDatabaseQueryOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator - :noindex: - -.. _CloudSpannerInstanceDeployOperator: - -CloudSpannerInstanceDeployOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator - :noindex: - -.. _CloudSpannerInstanceDeleteOperator: - -CloudSpannerInstanceDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator - :noindex: - - -CloudSpannerHook -"""""""""""""""" - -.. 
autoclass:: airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook - :members: - :noindex: +They also use :class:`airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook` to communicate with Google Cloud Platform. Cloud SQL ''''''''' -Cloud SQL Operators -""""""""""""""""""" - -- :ref:`CloudSqlInstanceDatabaseDeleteOperator` : deletes a database from a Cloud SQL - instance. -- :ref:`CloudSqlInstanceDatabaseCreateOperator` : creates a new database inside a Cloud - SQL instance. -- :ref:`CloudSqlInstanceDatabasePatchOperator` : updates a database inside a Cloud - SQL instance. -- :ref:`CloudSqlInstanceDeleteOperator` : delete a Cloud SQL instance. -- :ref:`CloudSqlInstanceExportOperator` : exports data from a Cloud SQL instance. -- :ref:`CloudSqlInstanceImportOperator` : imports data into a Cloud SQL instance. -- :ref:`CloudSqlInstanceCreateOperator` : create a new Cloud SQL instance. -- :ref:`CloudSqlInstancePatchOperator` : patch a Cloud SQL instance. -- :ref:`CloudSqlQueryOperator` : run query in a Cloud SQL instance. - -.. _CloudSqlInstanceDatabaseDeleteOperator: - -CloudSqlInstanceDatabaseDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator - :noindex: - -.. _CloudSqlInstanceDatabaseCreateOperator: - -CloudSqlInstanceDatabaseCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator - :noindex: - -.. _CloudSqlInstanceDatabasePatchOperator: +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator` + create a new Cloud SQL instance. -CloudSqlInstanceDatabasePatchOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator` + creates a new database inside a Cloud SQL instance. -.. 
autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator - :noindex: +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator` + deletes a database from a Cloud SQL instance. -.. _CloudSqlInstanceDeleteOperator: +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator` + updates a database inside a Cloud SQL instance. -CloudSqlInstanceDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator` + delete a Cloud SQL instance. -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator - :noindex: +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator` + exports data from a Cloud SQL instance. -.. _CloudSqlInstanceExportOperator: +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator` + imports data into a Cloud SQL instance. -CloudSqlInstanceExportOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator` + patch a Cloud SQL instance. -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator - :noindex: +:class:`airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator` + run query in a Cloud SQL instance. -.. _CloudSqlInstanceImportOperator: -CloudSqlInstanceImportOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator - :noindex: - -.. _CloudSqlInstanceCreateOperator: - -CloudSqlInstanceCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator - :noindex: - -.. _CloudSqlInstancePatchOperator: - -CloudSqlInstancePatchOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. 
autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator - :noindex: - -.. _CloudSqlQueryOperator: - -CloudSqlQueryOperator -^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_sql_operator.CloudSqlQueryOperator - :noindex: - -Cloud SQL Hooks -""""""""""""""" - -.. _CloudSqlHook: - -.. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook - :members: - :noindex: - -.. _CloudSqlDatabaseHook: - -.. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook - :members: - :noindex: - -.. _CloudSqlProxyRunner: - -.. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlProxyRunner - :members: - :noindex: +They also use :class:`airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook` and :class:`airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook` to communicate with Google Cloud Platform. Cloud Bigtable '''''''''''''' -Cloud Bigtable Operators -"""""""""""""""""""""""" - -- :ref:`BigtableInstanceCreateOperator` : creates a Cloud Bigtable instance. -- :ref:`BigtableInstanceDeleteOperator` : deletes a Google Cloud Bigtable instance. -- :ref:`BigtableClusterUpdateOperator` : updates the number of nodes in a Google Cloud Bigtable cluster. -- :ref:`BigtableTableCreateOperator` : creates a table in a Google Cloud Bigtable instance. -- :ref:`BigtableTableDeleteOperator` : deletes a table in a Google Cloud Bigtable instance. -- :ref:`BigtableTableWaitForReplicationSensor` : (sensor) waits for a table to be fully replicated. +:class:`airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator` + updates the number of nodes in a Google Cloud Bigtable cluster. -.. _BigtableInstanceCreateOperator: +:class:`airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator` + creates a Cloud Bigtable instance. 
-BigtableInstanceCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator` + deletes a Google Cloud Bigtable instance. -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator - :noindex: +:class:`airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator` + creates a table in a Google Cloud Bigtable instance. -.. _BigtableInstanceDeleteOperator: +:class:`airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator` + deletes a table in a Google Cloud Bigtable instance. -BigtableInstanceDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor` + (sensor) waits for a table to be fully replicated. -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator - :noindex: -.. _BigtableClusterUpdateOperator: +They also use :class:`airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook` to communicate with Google Cloud Platform. -BigtableClusterUpdateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator - :noindex: - -.. _BigtableTableCreateOperator: - -BigtableTableCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator - :noindex: - -.. _BigtableTableDeleteOperator: - -BigtableTableDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator - :noindex: - -.. _BigtableTableWaitForReplicationSensor: - -BigtableTableWaitForReplicationSensor -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor - :noindex: - -.. _BigtableHook: - -Cloud Bigtable Hook -"""""""""""""""""""" - -.. 
autoclass:: airflow.contrib.hooks.gcp_bigtable_hook.BigtableHook - :members: - :noindex: Compute Engine '''''''''''''' -Compute Engine Operators -"""""""""""""""""""""""" - -- :ref:`GceInstanceStartOperator` : start an existing Google Compute Engine instance. -- :ref:`GceInstanceStopOperator` : stop an existing Google Compute Engine instance. -- :ref:`GceSetMachineTypeOperator` : change the machine type for a stopped instance. -- :ref:`GceInstanceTemplateCopyOperator` : copy the Instance Template, applying - specified changes. -- :ref:`GceInstanceGroupManagerUpdateTemplateOperator` : patch the Instance Group Manager, - replacing source Instance Template URL with the destination one. - -The operators have the common base operator: - -.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceBaseOperator - :noindex: - -They also use :ref:`GceHook` to communicate with Google Cloud Platform. - -.. _GceInstanceStartOperator: +:class:`airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator` + start an existing Google Compute Engine instance. -GceInstanceStartOperator -^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator` + stop an existing Google Compute Engine instance. -.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStartOperator - :noindex: +:class:`airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator` + change the machine type for a stopped instance. -.. _GceInstanceStopOperator: +:class:`airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator` + copy the Instance Template, applying specified changes. -GceInstanceStopOperator -^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator` + patch the Instance Group Manager, replacing source Instance Template URL with the destination one. -.. 
autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceStopOperator - :noindex: -.. _GceSetMachineTypeOperator: +The operators have the common base operator :class:`airflow.contrib.operators.gcp_compute_operator.GceBaseOperator` -GceSetMachineTypeOperator -^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator - :noindex: - -.. _GceInstanceTemplateCopyOperator: - -GceInstanceTemplateCopyOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator - :noindex: - -.. _GceInstanceGroupManagerUpdateTemplateOperator: - -GceInstanceGroupManagerUpdateTemplateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator - :noindex: - -.. _GceHook: - -Compute Engine Hook -""""""""""""""""""" - -.. autoclass:: airflow.contrib.hooks.gcp_compute_hook.GceHook - :members: - :noindex: +They also use :class:`airflow.contrib.hooks.gcp_compute_hook.GceHook` to communicate with Google Cloud Platform. Cloud Functions ''''''''''''''' -Cloud Functions Operators -""""""""""""""""""""""""" - -- :ref:`GcfFunctionDeployOperator` : deploy Google Cloud Function to Google Cloud Platform -- :ref:`GcfFunctionDeleteOperator` : delete Google Cloud Function in Google Cloud Platform - -They also use :ref:`GcfHook` to communicate with Google Cloud Platform. - -.. _GcfFunctionDeployOperator: - -GcfFunctionDeployOperator -^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator` + deploy Google Cloud Function to Google Cloud Platform -.. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator - :noindex: +:class:`airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator` + delete Google Cloud Function in Google Cloud Platform -.. 
_GcfFunctionDeleteOperator: -GcfFunctionDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^ +They also use :class:`airflow.contrib.hooks.gcp_function_hook.GcfHook` to communicate with Google Cloud Platform. -.. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator - :noindex: -.. _GcfHook: - -Cloud Functions Hook -"""""""""""""""""""" +Cloud DataFlow +'''''''''''''' -.. autoclass:: airflow.contrib.hooks.gcp_function_hook.GcfHook - :members: - :noindex: +:class:`airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator` + launching Cloud Dataflow jobs written in Java. +:class:`airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator` + launching a templated Cloud DataFlow batch job. -Cloud DataFlow -'''''''''''''' +:class:`airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator` + launching Cloud Dataflow jobs written in python. -DataFlow Operators -"""""""""""""""""" -- :ref:`DataFlowJavaOperator` : launching Cloud Dataflow jobs written in Java. -- :ref:`DataflowTemplateOperator` : launching a templated Cloud DataFlow batch job. -- :ref:`DataFlowPythonOperator` : launching Cloud Dataflow jobs written in python. +They also use :class:`airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook` to communicate with Google Cloud Platform. .. _DataFlowJavaOperator: DataFlowJavaOperator ^^^^^^^^^^^^^^^^^^^^ -.. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator - :noindex: - .. code:: python default_args = { @@ -1176,381 +587,134 @@ DataFlowJavaOperator }, dag=dag) -.. _DataflowTemplateOperator: - -DataflowTemplateOperator -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataflow_operator.DataflowTemplateOperator - :noindex: - -.. _DataFlowPythonOperator: - -DataFlowPythonOperator -^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator - :noindex: - - -DataFlowHook -"""""""""""" - -.. 
autoclass:: airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook - :members: - :noindex: - - Cloud DataProc '''''''''''''' -DataProc Operators -"""""""""""""""""" - -- :ref:`DataprocClusterCreateOperator` : Create a new cluster on Google Cloud Dataproc. -- :ref:`DataprocClusterDeleteOperator` : Delete a cluster on Google Cloud Dataproc. -- :ref:`DataprocClusterScaleOperator` : Scale up or down a cluster on Google Cloud Dataproc. -- :ref:`DataProcPigOperator` : Start a Pig query Job on a Cloud DataProc cluster. -- :ref:`DataProcHiveOperator` : Start a Hive query Job on a Cloud DataProc cluster. -- :ref:`DataProcSparkSqlOperator` : Start a Spark SQL query Job on a Cloud DataProc cluster. -- :ref:`DataProcSparkOperator` : Start a Spark Job on a Cloud DataProc cluster. -- :ref:`DataProcHadoopOperator` : Start a Hadoop Job on a Cloud DataProc cluster. -- :ref:`DataProcPySparkOperator` : Start a PySpark Job on a Cloud DataProc cluster. -- :ref:`DataprocWorkflowTemplateInstantiateOperator` : Instantiate a WorkflowTemplate on Google Cloud Dataproc. -- :ref:`DataprocWorkflowTemplateInstantiateInlineOperator` : Instantiate a WorkflowTemplate Inline on Google Cloud Dataproc. - -.. _DataprocClusterCreateOperator: - -DataprocClusterCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator - :noindex: - -.. _DataprocClusterScaleOperator: - -DataprocClusterScaleOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.dataproc_operator.DataprocClusterCreateOperator` + Create a new cluster on Google Cloud Dataproc. -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator - :noindex: +:class:`airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator` + Delete a cluster on Google Cloud Dataproc. -.. 
_DataprocClusterDeleteOperator: +:class:`airflow.contrib.operators.dataproc_operator.DataprocClusterScaleOperator` + Scale up or down a cluster on Google Cloud Dataproc. -DataprocClusterDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator` + Start a Hadoop Job on a Cloud DataProc cluster. -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocClusterDeleteOperator - :noindex: +:class:`airflow.contrib.operators.dataproc_operator.DataProcHiveOperator` + Start a Hive query Job on a Cloud DataProc cluster. -.. _DataProcPigOperator: +:class:`airflow.contrib.operators.dataproc_operator.DataProcPigOperator` + Start a Pig query Job on a Cloud DataProc cluster. -DataProcPigOperator -^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator` + Start a PySpark Job on a Cloud DataProc cluster. -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPigOperator - :noindex: +:class:`airflow.contrib.operators.dataproc_operator.DataProcSparkOperator` + Start a Spark Job on a Cloud DataProc cluster. -.. _DataProcHiveOperator: +:class:`airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator` + Start a Spark SQL query Job on a Cloud DataProc cluster. -DataProcHiveOperator -^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHiveOperator - :noindex: - -.. _DataProcSparkSqlOperator: - -DataProcSparkSqlOperator -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator - :noindex: - -.. _DataProcSparkOperator: - -DataProcSparkOperator -^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkOperator - :noindex: - -.. _DataProcHadoopOperator: - -DataProcHadoopOperator -^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator - :noindex: - -.. 
_DataProcPySparkOperator: - -DataProcPySparkOperator -^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPySparkOperator - :noindex: - -.. _DataprocWorkflowTemplateInstantiateOperator: - -DataprocWorkflowTemplateInstantiateOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator - :noindex: - -.. _DataprocWorkflowTemplateInstantiateInlineOperator: +:class:`airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator` + Instantiate a WorkflowTemplate Inline on Google Cloud Dataproc. -DataprocWorkflowTemplateInstantiateInlineOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator` + Instantiate a WorkflowTemplate on Google Cloud Dataproc. -.. autoclass:: airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator - :noindex: Cloud Datastore ''''''''''''''' -Datastore Operators -""""""""""""""""""" +:class:`airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator` + Export entities from Google Cloud Datastore to Cloud Storage. -- :ref:`DatastoreExportOperator` : Export entities from Google Cloud Datastore to Cloud Storage. -- :ref:`DatastoreImportOperator` : Import entities from Cloud Storage to Google Cloud Datastore. +:class:`airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator` + Import entities from Cloud Storage to Google Cloud Datastore. -.. _DatastoreExportOperator: -DatastoreExportOperator -^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator - :noindex: - -.. _DatastoreImportOperator: - -DatastoreImportOperator -^^^^^^^^^^^^^^^^^^^^^^^ - -.. 
autoclass:: airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator - :noindex: - -DatastoreHook -""""""""""""" - -.. autoclass:: airflow.contrib.hooks.datastore_hook.DatastoreHook - :members: - :noindex: +They also use :class:`airflow.contrib.hooks.datastore_hook.DatastoreHook` to communicate with Google Cloud Platform. Cloud ML Engine ''''''''''''''' -Cloud ML Engine Operators -""""""""""""""""""""""""" - -- :ref:`MLEngineBatchPredictionOperator` : Start a Cloud ML Engine batch prediction job. -- :ref:`MLEngineModelOperator` : Manages a Cloud ML Engine model. -- :ref:`MLEngineTrainingOperator` : Start a Cloud ML Engine training job. -- :ref:`MLEngineVersionOperator` : Manages a Cloud ML Engine model version. - -.. _MLEngineBatchPredictionOperator: - -MLEngineBatchPredictionOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator - :members: - :noindex: - -.. _MLEngineModelOperator: - -MLEngineModelOperator -^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineModelOperator - :members: - :noindex: - -.. _MLEngineTrainingOperator: - -MLEngineTrainingOperator -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator - :members: - :noindex: - -.. _MLEngineVersionOperator: - -MLEngineVersionOperator -^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator` + Start a Cloud ML Engine batch prediction job. -.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator - :members: - :noindex: +:class:`airflow.contrib.operators.mlengine_operator.MLEngineModelOperator` + Manages a Cloud ML Engine model. -Cloud ML Engine Hook -"""""""""""""""""""" +:class:`airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator` + Start a Cloud ML Engine training job. -.. 
_MLEngineHook: +:class:`airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator` + Manages a Cloud ML Engine model version. -MLEngineHook -^^^^^^^^^^^^ -.. autoclass:: airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook - :members: - :noindex: +They also use :class:`airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook` to communicate with Google Cloud Platform. Cloud Storage ''''''''''''' -Storage Operators -""""""""""""""""" +:class:`airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator` + Uploads a file to Google Cloud Storage. -- :ref:`FileToGoogleCloudStorageOperator` : Uploads a file to Google Cloud Storage. -- :ref:`GoogleCloudStorageBucketCreateAclEntryOperator` : Creates a new ACL entry on the specified bucket. -- :ref:`GoogleCloudStorageCreateBucketOperator` : Creates a new cloud storage bucket. -- :ref:`GoogleCloudStorageDownloadOperator` : Downloads a file from Google Cloud Storage. -- :ref:`GoogleCloudStorageListOperator` : List all objects from the bucket with the give string prefix and delimiter in name. -- :ref:`GoogleCloudStorageObjectCreateAclEntryOperator` : Creates a new ACL entry on the specified object. -- :ref:`GoogleCloudStorageToBigQueryOperator` : Loads files from Google cloud storage into BigQuery. -- :ref:`GoogleCloudStorageToGoogleCloudStorageOperator` : Copies objects from a bucket to another, with renaming if requested. -- :ref:`GoogleCloudStorageToGoogleCloudStorageTransferOperator` : Copies objects from a bucket to another using Google Transfer service. -- :ref:`MySqlToGoogleCloudStorageOperator`: Copy data from any MySQL Database to Google cloud storage in JSON format. +:class:`airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator` + Creates a new ACL entry on the specified bucket. -.. _FileToGoogleCloudStorageOperator: +:class:`airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator` + Creates a new ACL entry on the specified object. 
-FileToGoogleCloudStorageOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator` + Downloads a file from Google Cloud Storage. -.. autoclass:: airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator - :noindex: +:class:`airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator` + List all objects from the bucket with the give string prefix and delimiter in name. -.. _GoogleCloudStorageCreateBucketOperator: +:class:`airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator` + Creates a new cloud storage bucket. -GoogleCloudStorageBucketCreateAclEntryOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator` + Loads files from Google cloud storage into BigQuery. -.. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator - :noindex: +:class:`airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator` + Copies objects from a bucket to another, with renaming if requested. -.. _GoogleCloudStorageBucketCreateAclEntryOperator: +:class:`airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator` + Copy data from any MySQL Database to Google cloud storage in JSON format. -GoogleCloudStorageCreateBucketOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator - :noindex: +They also use :class:`airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook` to communicate with Google Cloud Platform. -.. _GoogleCloudStorageDownloadOperator: -GoogleCloudStorageDownloadOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator - :noindex: - -.. 
_GoogleCloudStorageListOperator: - -GoogleCloudStorageListOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator - :noindex: - -.. _GoogleCloudStorageToBigQueryOperator: - -GoogleCloudStorageObjectCreateAclEntryOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator - :noindex: - -.. _GoogleCloudStorageObjectCreateAclEntryOperator: - -GoogleCloudStorageToBigQueryOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator - :noindex: - -.. _GoogleCloudStorageToGoogleCloudStorageOperator: - -GoogleCloudStorageToGoogleCloudStorageOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator - :noindex: - -.. _GoogleCloudStorageToGoogleCloudStorageTransferOperator: - -GoogleCloudStorageToGoogleCloudStorageTransferOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcs_to_gcs_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator - :noindex: - -.. _MySqlToGoogleCloudStorageOperator: - -MySqlToGoogleCloudStorageOperator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator - :noindex: +Transfer Service +'''''''''''''''' -GoogleCloudStorageHook -"""""""""""""""""""""" +:class:`airflow.contrib.operators.gcs_to_gcs_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator` + Copies objects from a bucket to another using Google Transfer service. -.. 
autoclass:: airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook - :members: - :noindex: -GCPTransferServiceHook -"""""""""""""""""""""" +They also use :class:`airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook` to communicate with Google Cloud Platform. -.. autoclass:: airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook - :members: - :noindex: Google Kubernetes Engine '''''''''''''''''''''''' -Google Kubernetes Engine Cluster Operators -"""""""""""""""""""""""""""""""""""""""""" - -- :ref:`GKEClusterCreateOperator` : Creates a Kubernetes Cluster in Google Cloud Platform -- :ref:`GKEClusterDeleteOperator` : Deletes a Kubernetes Cluster in Google Cloud Platform - -GKEClusterCreateOperator -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator - :noindex: - -.. _GKEClusterCreateOperator: - -GKEClusterDeleteOperator -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator - :noindex: - -.. _GKEClusterDeleteOperator: +:class:`airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator` + Creates a Kubernetes Cluster in Google Cloud Platform -GKEPodOperator -^^^^^^^^^^^^^^ +:class:`airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator` + Deletes a Kubernetes Cluster in Google Cloud Platform -.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator - :noindex: +:class:`airflow.contrib.operators.gcp_container_operator.GKEPodOperator` + Executes a task in a Kubernetes pod in the specified Google Kubernetes Engine cluster -.. _GKEPodOperator: +They also use :class:`airflow.contrib.hooks.gcp_container_hook.GKEClusterHook` to communicate with Google Cloud Platform. -Google Kubernetes Engine Hook -""""""""""""""""""""""""""""" - -.. autoclass:: airflow.contrib.hooks.gcp_container_hook.GKEClusterHook - :members: - :noindex: .. 
_Qubole: @@ -1560,32 +724,23 @@ Qubole Apache Airflow has a native operator and hooks to talk to `Qubole `__, which lets you submit your big data jobs directly to Qubole from Apache Airflow. -QuboleOperator -'''''''''''''' - -.. autoclass:: airflow.contrib.operators.qubole_operator.QuboleOperator - :noindex: -QubolePartitionSensor -''''''''''''''''''''' +:class:`airflow.contrib.operators.qubole_operator.QuboleOperator` + Execute tasks (commands) on QDS (https://qubole.com). -.. autoclass:: airflow.contrib.sensors.qubole_sensor.QubolePartitionSensor - :noindex: - -QuboleFileSensor -'''''''''''''''' +:class:`airflow.contrib.sensors.qubole_sensor.QubolePartitionSensor` + Wait for a Hive partition to show up in QHS (Qubole Hive Service) + and check for its presence via QDS APIs -.. autoclass:: airflow.contrib.sensors.qubole_sensor.QuboleFileSensor - :noindex: +:class:`airflow.contrib.sensors.qubole_sensor.QuboleFileSensor` + Wait for a file or folder to be present in cloud storage + and check for its presence via QDS APIs -QuboleCheckOperator -''''''''''''''''''' - -.. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator - :noindex: - -QuboleValueCheckOperator -'''''''''''''''''''''''' +:class:`airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator` + Performs checks against Qubole Commands. ``QuboleCheckOperator`` expects + a command that will be executed on QDS. -.. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator - :noindex: +:class:`airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator` + Performs a simple value check using Qubole command. 
+ By default, each value on the first row of this + Qubole command is compared with a pre-defined value From 6f122f4fc55632fb9b3ce0790a19523425adefa9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Wed, 6 Feb 2019 19:45:46 +0100 Subject: [PATCH 0050/1104] [AIRFLOW-XXX] Extract reverse proxy info to a separate file (#4657) --- docs/howto/index.rst | 1 + docs/howto/run-behind-proxy.rst | 82 +++++++++++++++++++++++++++++++++ docs/integration.rst | 69 --------------------------- 3 files changed, 83 insertions(+), 69 deletions(-) create mode 100644 docs/howto/run-behind-proxy.rst diff --git a/docs/howto/index.rst b/docs/howto/index.rst index 42f2e680b7767..9d31d1694798d 100644 --- a/docs/howto/index.rst +++ b/docs/howto/index.rst @@ -37,6 +37,7 @@ configuring an Airflow environment. executor/use-celery executor/use-dask executor/use-mesos + run-behind-proxy run-with-systemd run-with-upstart use-test-config diff --git a/docs/howto/run-behind-proxy.rst b/docs/howto/run-behind-proxy.rst new file mode 100644 index 0000000000000..5db4ab0bf155e --- /dev/null +++ b/docs/howto/run-behind-proxy.rst @@ -0,0 +1,82 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ +Running Airflow behind a reverse proxy +====================================== + +Airflow can be set up behind a reverse proxy, with the ability to set its endpoint with great +flexibility. + +For example, you can configure your reverse proxy to get: + +:: + + https://lab.mycompany.com/myorg/airflow/ + +To do so, you need to set the following setting in your `airflow.cfg`:: + + base_url = http://my_host/myorg/airflow + +Additionally if you use Celery Executor, you can get Flower in `/myorg/flower` with:: + + flower_url_prefix = /myorg/flower + +Your reverse proxy (ex: nginx) should be configured as follow: + +- pass the url and http header as it for the Airflow webserver, without any rewrite, for example:: + + server { + listen 80; + server_name lab.mycompany.com; + + location /myorg/airflow/ { + proxy_pass http://localhost:8080; + proxy_set_header Host $host; + proxy_redirect off; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + } + } + +- rewrite the url for the flower endpoint:: + + server { + listen 80; + server_name lab.mycompany.com; + + location /myorg/flower/ { + rewrite ^/myorg/flower/(.*)$ /$1 break; # remove prefix from http header + proxy_pass http://localhost:5555; + proxy_set_header Host $host; + proxy_redirect off; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + } + } + +To ensure that Airflow generates URLs with the correct scheme when +running behind a TLS-terminating proxy, you should configure the proxy +to set the `X-Forwarded-Proto` header, and enable the `ProxyFix` +middleware in your `airflow.cfg`:: + + enable_proxy_fix = True + +.. note:: + You should only enable the `ProxyFix` middleware when running + Airflow behind a trusted proxy (AWS ELB, nginx, etc.). 
diff --git a/docs/integration.rst b/docs/integration.rst index cacb44201d647..93215d7b76fca 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -18,81 +18,12 @@ Integration =========== -- :ref:`ReverseProxy` - :ref:`Azure` - :ref:`AWS` - :ref:`Databricks` - :ref:`GCP` - :ref:`Qubole` -.. _ReverseProxy: - -Reverse Proxy -------------- - -Airflow can be set up behind a reverse proxy, with the ability to set its endpoint with great -flexibility. - -For example, you can configure your reverse proxy to get: - -:: - - https://lab.mycompany.com/myorg/airflow/ - -To do so, you need to set the following setting in your `airflow.cfg`:: - - base_url = http://my_host/myorg/airflow - -Additionally if you use Celery Executor, you can get Flower in `/myorg/flower` with:: - - flower_url_prefix = /myorg/flower - -Your reverse proxy (ex: nginx) should be configured as follow: - -- pass the url and http header as it for the Airflow webserver, without any rewrite, for example:: - - server { - listen 80; - server_name lab.mycompany.com; - - location /myorg/airflow/ { - proxy_pass http://localhost:8080; - proxy_set_header Host $host; - proxy_redirect off; - proxy_http_version 1.1; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection "upgrade"; - } - } - -- rewrite the url for the flower endpoint:: - - server { - listen 80; - server_name lab.mycompany.com; - - location /myorg/flower/ { - rewrite ^/myorg/flower/(.*)$ /$1 break; # remove prefix from http header - proxy_pass http://localhost:5555; - proxy_set_header Host $host; - proxy_redirect off; - proxy_http_version 1.1; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection "upgrade"; - } - } - -To ensure that Airflow generates URLs with the correct scheme when -running behind a TLS-terminating proxy, you should configure the proxy -to set the `X-Forwarded-Proto` header, and enable the `ProxyFix` -middleware in your `airflow.cfg`:: - - enable_proxy_fix = True - -.. 
note:: - You should only enable the `ProxyFix` middleware when running - Airflow behind a trusted proxy (AWS ELB, nginx, etc.). - .. _Azure: Azure: Microsoft Azure From 5275a8ff0e6a91d51b045e90b061cc841bbd860c Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Wed, 6 Feb 2019 11:49:19 -0800 Subject: [PATCH 0051/1104] [AIRFLOW-2694] Declare permissions in DAG definition (#4642) * [AIRFLOW-2694] Declare permissions in DAG definition This PR adds support for declaratively assigning DAG-level permissions to a role via the `DAG.__init__()` method. When the DAG definition is evaluated and the `access_control` argument is supplied, we update the permissions on the ViewMenu associated with this DAG according to the following rules: - If the role does not exist, we raise an exception. - If the role exists, we ensure that it has the specified set of permissions on the DAG - If any other permissions exist for the DAG that are not specified in `access_control`, we revoke them * Move RBAC constants to break circular dependency * Add license header * Sync DAG permissions via CLI and /refresh* endpoints Move the DAG-level permission syncing logic into `AirflowSecurityManager.sync_perm_for_dag`, and trigger this method from the CLI's `sync_perm` command and from the `/refresh` and `/refresh_all` web endpoints. 
* Default access_control to None --- airflow/bin/cli.py | 10 +++- airflow/models/__init__.py | 16 +++++- airflow/www/security.py | 80 ++++++++++++++++++++++++++++-- airflow/www/views.py | 10 ++-- tests/core.py | 31 +++++++++++- tests/www/test_security.py | 99 +++++++++++++++++++++++++++++++++++++- 6 files changed, 230 insertions(+), 16 deletions(-) diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index b2aa9191bcd44..77fddbcd3ffe3 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -1633,8 +1633,14 @@ def list_dag_runs(args, dag=None): @cli_utils.action_logging def sync_perm(args): appbuilder = cached_appbuilder() - print('Update permission, view-menu for all existing roles') + print('Updating permission, view-menu for all existing roles') appbuilder.sm.sync_roles() + print('Updating permission on all DAG views') + dags = DagBag().dags.values() + for dag in dags: + appbuilder.sm.sync_perm_for_dag( + dag.dag_id, + dag.access_control) Arg = namedtuple( @@ -2293,7 +2299,7 @@ class CLIFactory(object): }, { 'func': sync_perm, - 'help': "Update existing role's permissions.", + 'help': "Update permissions for existing roles and DAGs.", 'args': tuple(), }, { diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index f82bbbf0a6099..0d279b659d482 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -2984,6 +2984,10 @@ class DAG(BaseDag, LoggingMixin): :param on_success_callback: Much like the ``on_failure_callback`` except that it is executed when the dag succeeds. 
:type on_success_callback: callable + :param access_control: Specify optional DAG-level permissions, e.g., + {'role1': {'can_dag_read'}, + 'role2': {'can_dag_read', 'can_dag_edit'}} + :type access_control: dict """ def __init__( @@ -3005,7 +3009,8 @@ def __init__( orientation=configuration.conf.get('webserver', 'dag_orientation'), catchup=configuration.conf.getboolean('scheduler', 'catchup_by_default'), on_success_callback=None, on_failure_callback=None, - params=None): + params=None, + access_control=None): self.user_defined_macros = user_defined_macros self.user_defined_filters = user_defined_filters @@ -3082,6 +3087,7 @@ def __init__( self.on_failure_callback = on_failure_callback self._old_context_manager_dags = [] + self._access_control = access_control self._comps = { 'dag_id', @@ -3303,6 +3309,14 @@ def concurrency(self): def concurrency(self, value): self._concurrency = value + @property + def access_control(self): + return self._access_control + + @access_control.setter + def access_control(self, value): + self._access_control = value + @property def description(self): return self._description diff --git a/airflow/www/security.py b/airflow/www/security.py index a81f36e1dc4e9..c87691f95de42 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -24,6 +24,7 @@ from sqlalchemy import or_ from airflow import models +from airflow.exceptions import AirflowException from airflow.www.app import appbuilder from airflow.utils.db import provide_session from airflow.utils.log.logging_mixin import LoggingMixin @@ -451,12 +452,17 @@ def sync_roles(self): self.update_admin_perm_view() self.clean_perms() - def sync_perm_for_dag(self, dag_id): + def sync_perm_for_dag(self, dag_id, access_control=None): """ Sync permissions for given dag id. 
The dag id surely exists in our dag bag - as only / refresh button will call this function - - :param dag_id: + as only / refresh button or cli.sync_perm will call this function + + :param dag_id: the ID of the DAG whose permissions should be updated + :type dag_id: string + :param access_control: a dict where each key is a rolename and + each value is a set() of permission names (e.g., + {'can_dag_read'} + :type access_control: dict :return: """ for dag_perm in DAG_PERMS: @@ -464,6 +470,72 @@ def sync_perm_for_dag(self, dag_id): if perm_on_dag is None: self.add_permission_view_menu(dag_perm, dag_id) + if access_control: + self._sync_dag_view_permissions(dag_id, access_control) + + def _sync_dag_view_permissions(self, dag_id, access_control): + """Set the access policy on the given DAG's ViewModel. + + :param dag_id: the ID of the DAG whose permissions should be updated + :type dag_id: string + :param access_control: a dict where each key is a rolename and + each value is a set() of permission names (e.g., + {'can_dag_read'} + :type access_control: dict + """ + def _get_or_create_dag_permission(perm_name): + dag_perm = self.find_permission_view_menu(perm_name, dag_id) + if not dag_perm: + self.log.info("Creating new permission '{}' on view '{}'".format( + perm_name, + dag_id + )) + dag_perm = self.add_permission_view_menu(perm_name, dag_id) + + return dag_perm + + def _revoke_stale_permissions(dag_view): + existing_dag_perms = self.find_permissions_view_menu(dag_view) + for perm in existing_dag_perms: + non_admin_roles = [role for role in perm.role + if role.name != 'Admin'] + for role in non_admin_roles: + target_perms_for_role = access_control.get(role.name, {}) + if perm.permission.name not in target_perms_for_role: + self.log.info("Revoking '{}' on DAG '{}' for role '{}'".format( + perm.permission, + dag_id, + role.name + )) + self.del_permission_role(role, perm) + + dag_view = self.find_view_menu(dag_id) + if dag_view: + _revoke_stale_permissions(dag_view) + + 
for rolename, perms in access_control.items(): + role = self.find_role(rolename) + if not role: + raise AirflowException( + "The access_control mapping for DAG '{}' includes a role " + "named '{}', but that role does not exist".format( + dag_id, + rolename)) + + perms = set(perms) + invalid_perms = perms - DAG_PERMS + if invalid_perms: + raise AirflowException( + "The access_control map for DAG '{}' includes the following " + "invalid permissions: {}; The set of valid permissions " + "is: {}".format(dag_id, + (perms - DAG_PERMS), + DAG_PERMS)) + + for perm_name in perms: + dag_perm = _get_or_create_dag_permission(perm_name) + self.add_permission_role(role, dag_perm) + def create_perm_vm_for_all_dag(self): """ Create perm-vm if not exist and insert into FAB security model for all-dags. diff --git a/airflow/www/views.py b/airflow/www/views.py index 0775d526e5b67..f59b2c69dac8e 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -1679,10 +1679,10 @@ def refresh(self, session=None): session.merge(orm_dag) session.commit() + dag = dagbag.get_dag(dag_id) # sync dag permission - appbuilder.sm.sync_perm_for_dag(dag_id) + appbuilder.sm.sync_perm_for_dag(dag_id, dag.access_control) - dagbag.get_dag(dag_id) flash("DAG [{}] is now fresh as a daisy".format(dag_id)) return redirect(request.referrer) @@ -1691,9 +1691,9 @@ def refresh(self, session=None): @action_logging def refresh_all(self): dagbag.collect_dags(only_if_updated=False) - for dag_id in dagbag.dags: - # sync permissions for all dags - appbuilder.sm.sync_perm_for_dag(dag_id) + # sync permissions for all dags + for dag_id, dag in dagbag.dags.items(): + appbuilder.sm.sync_perm_for_dag(dag_id, dag.access_control) flash("All DAGs are now up to date") return redirect('/') diff --git a/tests/core.py b/tests/core.py index ad68f7d88a615..9cb9369caeb1c 100644 --- a/tests/core.py +++ b/tests/core.py @@ -1313,13 +1313,40 @@ def test_cli_remove_user_role(self): "User should have been removed from role 'Viewer'" ) - 
def test_cli_sync_perm(self): - # test whether sync_perm cli will throw exceptions or not + @mock.patch("airflow.bin.cli.DagBag") + def test_cli_sync_perm(self, dagbag_mock): + self.expect_dagbag_contains([ + DAG('has_access_control', + access_control={ + 'Public': {'can_dag_read'} + }), + DAG('no_access_control') + ], dagbag_mock) + self.appbuilder.sm = mock.Mock() + args = self.parser.parse_args([ 'sync_perm' ]) cli.sync_perm(args) + self.appbuilder.sm.sync_roles.assert_called_once() + + self.assertEqual(2, + len(self.appbuilder.sm.sync_perm_for_dag.mock_calls)) + self.appbuilder.sm.sync_perm_for_dag.assert_any_call( + 'has_access_control', + {'Public': {'can_dag_read'}} + ) + self.appbuilder.sm.sync_perm_for_dag.assert_any_call( + 'no_access_control', + None, + ) + + def expect_dagbag_contains(self, dags, dagbag_mock): + dagbag = mock.Mock() + dagbag.dags = {dag.dag_id: dag for dag in dags} + dagbag_mock.return_value = dagbag + def test_cli_list_tasks(self): for dag_id in self.dagbag.dags.keys(): args = self.parser.parse_args(['list_tasks', dag_id]) diff --git a/tests/www/test_security.py b/tests/www/test_security.py index 611effbfa8d4b..93ebb22956ff2 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -30,6 +30,7 @@ from sqlalchemy import Column, Integer, String, Date, Float +from airflow.exceptions import AirflowException from airflow.www.security import AirflowSecurityManager, DAG_PERMS @@ -173,9 +174,9 @@ def test_has_access(self, mock_has_view_access): mock_has_view_access.return_value = True self.assertTrue(self.security_manager.has_access('perm', 'view', user)) - def test_sync_perm_for_dag(self): + def test_sync_perm_for_dag_creates_permissions_on_view_menus(self): test_dag_id = 'TEST_DAG' - self.security_manager.sync_perm_for_dag(test_dag_id) + self.security_manager.sync_perm_for_dag(test_dag_id, access_control=None) for dag_perm in DAG_PERMS: self.assertIsNotNone(self.security_manager. 
find_permission_view_menu(dag_perm, test_dag_id)) @@ -192,3 +193,97 @@ def test_has_all_dag_access(self, mock_has_role, mock_has_perm): mock_has_perm.return_value = True self.assertTrue(self.security_manager.has_all_dags_access()) + + def test_access_control_with_non_existent_role(self): + with self.assertRaises(AirflowException) as context: + self.security_manager.sync_perm_for_dag( + dag_id='access-control-test', + access_control={ + 'this-role-does-not-exist': ['can_dag_edit', 'can_dag_read'] + }) + self.assertIn("role does not exist", str(context.exception)) + + def test_access_control_with_invalid_permission(self): + invalid_permissions = [ + 'can_varimport', # a real permission, but not a member of DAG_PERMS + 'can_eat_pudding', # clearly not a real permission + ] + for permission in invalid_permissions: + self.expect_user_is_in_role(self.user, rolename='team-a') + with self.assertRaises(AirflowException) as context: + self.security_manager.sync_perm_for_dag( + 'access_control_test', + access_control={ + 'team-a': {permission} + }) + self.assertIn("invalid permissions", str(context.exception)) + + def test_access_control_is_set_on_init(self): + self.expect_user_is_in_role(self.user, rolename='team-a') + self.security_manager.sync_perm_for_dag( + 'access_control_test', + access_control={ + 'team-a': ['can_dag_edit', 'can_dag_read'] + }) + self.assert_user_has_dag_perms( + perms=['can_dag_edit', 'can_dag_read'], + dag_id='access_control_test', + ) + + self.expect_user_is_in_role(self.user, rolename='NOT-team-a') + self.assert_user_does_not_have_dag_perms( + perms=['can_dag_edit', 'can_dag_read'], + dag_id='access_control_test', + ) + + def test_access_control_stale_perms_are_revoked(self): + READ_WRITE = {'can_dag_read', 'can_dag_edit'} + READ_ONLY = {'can_dag_read'} + + self.expect_user_is_in_role(self.user, rolename='team-a') + self.security_manager.sync_perm_for_dag( + 'access_control_test', + access_control={'team-a': READ_WRITE}) + 
self.assert_user_has_dag_perms( + perms=READ_WRITE, + dag_id='access_control_test', + ) + + self.security_manager.sync_perm_for_dag( + 'access_control_test', + access_control={'team-a': READ_ONLY}) + self.assert_user_has_dag_perms( + perms=['can_dag_read'], + dag_id='access_control_test', + ) + self.assert_user_does_not_have_dag_perms( + perms=['can_dag_edit'], + dag_id='access_control_test', + ) + + def expect_user_is_in_role(self, user, rolename): + self.security_manager.init_role(rolename, [], []) + role = self.security_manager.find_role(rolename) + if not role: + self.security_manager.add_role(rolename) + role = self.security_manager.find_role(rolename) + user.roles = [role] + self.security_manager.update_user(user) + + def assert_user_has_dag_perms(self, perms, dag_id): + for perm in perms: + self.assertTrue( + self._has_dag_perm(perm, dag_id), + "User should have '{}' on DAG '{}'".format(perm, dag_id)) + + def assert_user_does_not_have_dag_perms(self, dag_id, perms): + for perm in perms: + self.assertFalse( + self._has_dag_perm(perm, dag_id), + "User should not have '{}' on DAG '{}'".format(perm, dag_id)) + + def _has_dag_perm(self, perm, dag_id): + return self.security_manager.has_access( + perm, + dag_id, + self.user) From 2dadee7a248442012e835eeaad3f21f698d0642b Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Wed, 6 Feb 2019 20:34:50 -0800 Subject: [PATCH 0052/1104] [AIRFLOW-3813] Add CLI commands to manage roles (#4658) * [AIRFLOW-3813] Add CLI commands to manage roles Here is the help text of the new command `airflow roles`: usage: airflow roles [-h] [-c] [-l] [role [role ...]] positional arguments: role The name of a role optional arguments: -h, --help show this help message and exit -c, --create Create a new role -l, --list List roles Create is reentrant, i.e., it only adds a new role if it does not exist. 
* Update docs on role creation --- airflow/bin/cli.py | 42 +++++++++++++++++++++++++++++++++++-- airflow/www/security.py | 17 +++++++++++++++ docs/howto/add-new-role.rst | 11 +++++++--- tests/core.py | 42 +++++++++++++++++++++++++++++++++++++ 4 files changed, 107 insertions(+), 5 deletions(-) diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index 77fddbcd3ffe3..05f440c5233a5 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -1578,6 +1578,28 @@ def _import_users(users_list): return users_created, users_updated +@cli_utils.action_logging +def roles(args): + if args.create and args.list: + raise AirflowException("Please specify either --create or --list, " + "but not both") + + appbuilder = cached_appbuilder() + if args.create: + for role_name in args.role: + appbuilder.sm.add_role(role_name) + elif args.list: + roles = appbuilder.sm.get_all_roles() + print("Existing roles:\n") + role_names = sorted([[r.name] for r in roles]) + msg = tabulate(role_names, + headers=['Role'], + tablefmt="fancy_grid") + if sys.version_info[0] < 3: + msg = msg.encode('utf-8') + print(msg) + + @cli_utils.action_logging def list_dag_runs(args, dag=None): if dag: @@ -2125,6 +2147,19 @@ class CLIFactory(object): ("-e", "--export"), metavar="FILEPATH", help="Export users to JSON file"), + # roles + 'create_role': Arg( + ('-c', '--create'), + help='Create a new role', + action='store_true'), + 'list_roles': Arg( + ('-l', '--list'), + help='List roles', + action='store_true'), + 'roles': Arg( + ('role',), + help='The name of a role', + nargs='*'), 'autoscale': Arg( ('-a', '--autoscale'), help="Minimum and Maximum number of worker to autoscale"), @@ -2296,8 +2331,11 @@ class CLIFactory(object): 'add_role', 'remove_role', 'user_import', 'user_export', 'username', 'email', 'firstname', 'lastname', 'role', 'password', 'use_random_password'), - }, - { + }, { + 'func': roles, + 'help': 'Create/List roles', + 'args': ('create_role', 'list_roles', 'roles'), + }, { 'func': sync_perm, 
'help': "Update permissions for existing roles and DAGs.", 'args': tuple(), diff --git a/airflow/www/security.py b/airflow/www/security.py index c87691f95de42..7055228793364 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -195,6 +195,23 @@ def init_role(self, role_name, role_vms, role_perms): else: self.log.info('Existing permissions for the role:%s within the database will persist.', role_name) + def delete_role(self, role_name): + """Delete the given Role + + :param role_name: the name of a role in the ab_role table + """ + session = self.get_session + role = session.query(sqla_models.Role)\ + .filter(sqla_models.Role.name == role_name)\ + .first() + if role: + self.log.info("Deleting role '{}'".format(role_name)) + session.delete(role) + session.commit() + else: + raise AirflowException("Role named '{}' does not exist".format( + role_name)) + def get_user_roles(self, user=None): """ Get all the roles associated with the user. diff --git a/docs/howto/add-new-role.rst b/docs/howto/add-new-role.rst index 60d718ed1eb77..b82633817d232 100644 --- a/docs/howto/add-new-role.rst +++ b/docs/howto/add-new-role.rst @@ -28,7 +28,12 @@ and click ``List Roles`` in the new UI. .. image:: ../img/add-role.png .. image:: ../img/new-role.png +The image shows the creation of a role which can only write to +``example_python_operator``. You can also create roles via the CLI +using the ``airflow roles`` command, e.g.: -The image shows a role which could only write to example_python_operator is created. -And we could assign the given role to a new user using ``airflow users --add-role`` cli command. -Default roles(Admin, User, Viewer, Op) shiped with RBAC could view the details for every dag. + airflow roles --create Role1 Role2 + +And we could assign the given role to a new user using the ``airflow +users --add-role`` CLI command. Default roles(Admin, User, Viewer, +Op) shipped with RBAC could view the details for every dag. 
diff --git a/tests/core.py b/tests/core.py index 9cb9369caeb1c..0fbe29952e66d 100644 --- a/tests/core.py +++ b/tests/core.py @@ -1089,6 +1089,10 @@ def tearDown(self): test_user = self.appbuilder.sm.find_user(email=email) if test_user: self.appbuilder.sm.del_register_user(test_user) + for role_name in ['FakeTeamA', 'FakeTeamB']: + if self.appbuilder.sm.find_role(role_name): + self.appbuilder.sm.delete_role(role_name) + super(CliTests, self).tearDown() @staticmethod @@ -1347,6 +1351,44 @@ def expect_dagbag_contains(self, dags, dagbag_mock): dagbag.dags = {dag.dag_id: dag for dag in dags} dagbag_mock.return_value = dagbag + def test_cli_create_roles(self): + self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamA')) + self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamB')) + + args = self.parser.parse_args([ + 'roles', '--create', 'FakeTeamA', 'FakeTeamB' + ]) + cli.roles(args) + + self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamA')) + self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamB')) + + def test_cli_create_roles_is_reentrant(self): + self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamA')) + self.assertIsNone(self.appbuilder.sm.find_role('FakeTeamB')) + + args = self.parser.parse_args([ + 'roles', '--create', 'FakeTeamA', 'FakeTeamB' + ]) + + cli.roles(args) + cli.roles(args) + + self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamA')) + self.assertIsNotNone(self.appbuilder.sm.find_role('FakeTeamB')) + + def test_cli_list_roles(self): + self.appbuilder.sm.add_role('FakeTeamA') + self.appbuilder.sm.add_role('FakeTeamB') + + with mock.patch('sys.stdout', + new_callable=six.StringIO) as mock_stdout: + cli.roles(self.parser.parse_args(['roles', '-l'])) + stdout = mock_stdout.getvalue() + + self.assertIn('FakeTeamA', stdout) + self.assertIn('FakeTeamB', stdout) + def test_cli_list_tasks(self): for dag_id in self.dagbag.dags.keys(): args = self.parser.parse_args(['list_tasks', dag_id]) From 
a1d5b01b10c0f7107b88b46df1cf4ddd5bc0bbde Mon Sep 17 00:00:00 2001 From: mans2singh Date: Thu, 7 Feb 2019 00:43:13 -0500 Subject: [PATCH 0053/1104] [AIRFLOW-3817] - Corrected task ids returned by BranchPythonOperator to match the dummy operator ids (#4659) --- airflow/example_dags/example_branch_python_dop_operator_3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airflow/example_dags/example_branch_python_dop_operator_3.py b/airflow/example_dags/example_branch_python_dop_operator_3.py index 950d5c1ae253b..36edb27317344 100644 --- a/airflow/example_dags/example_branch_python_dop_operator_3.py +++ b/airflow/example_dags/example_branch_python_dop_operator_3.py @@ -42,9 +42,9 @@ def should_run(**kwargs): print('------------- exec dttm = {} and minute = {}'. format(kwargs['execution_date'], kwargs['execution_date'].minute)) if kwargs['execution_date'].minute % 2 == 0: - return "oper_1" + return "dummy_task_1" else: - return "oper_2" + return "dummy_task_2" cond = BranchPythonOperator( From ba66fa7e774458c8f2fc14fb742847f63508700a Mon Sep 17 00:00:00 2001 From: mans2singh Date: Thu, 7 Feb 2019 00:44:36 -0500 Subject: [PATCH 0054/1104] [AIRFLOW-3802] Updated documentation for HiveServer2Hook (#4647) --- airflow/hooks/hive_hooks.py | 46 ++++++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py index 224a9e163fb1f..68a7cfe7eb106 100644 --- a/airflow/hooks/hive_hooks.py +++ b/airflow/hooks/hive_hooks.py @@ -761,6 +761,9 @@ def __init__(self, hiveserver2_conn_id='hiveserver2_default'): self.hiveserver2_conn_id = hiveserver2_conn_id def get_conn(self, schema=None): + """ + Returns a Hive connection object. 
+ """ db = self.get_connection(self.hiveserver2_conn_id) auth_mechanism = db.extra_dejson.get('authMechanism', 'NONE') if auth_mechanism == 'NONE' and db.login is None: @@ -836,11 +839,17 @@ def _get_results(self, hql, schema='default', fetch_size=None, hive_conf=None): def get_results(self, hql, schema='default', fetch_size=None, hive_conf=None): """ Get results of the provided hql in target schema. + :param hql: hql to be executed. + :type hql: str or list :param schema: target schema, default to 'default'. - :param fetch_size max size of result to fetch. + :type schema: str + :param fetch_size max: size of result to fetch. + :type fetch_size_max: int :param hive_conf: hive_conf to execute alone with the hql. - :return: results of hql execution. + :type hive_conf: dict + :return: results of hql execution, dict with data (list of results) and header + :rtype: dict """ results_iter = self._get_results(hql, schema, fetch_size=fetch_size, hive_conf=hive_conf) @@ -863,15 +872,24 @@ def to_csv( hive_conf=None): """ Execute hql in target schema and write results to a csv file. + :param hql: hql to be executed. + :type hql: str or list :param csv_filepath: filepath of csv to write results into. + :type csv_filepath: str :param schema: target schema, default to 'default'. - :param delimiter: delimiter of the csv file. + :type schema: str + :param delimiter: delimiter of the csv file, default to ','. + :type delimiter: str :param lineterminator: lineterminator of the csv file. - :param output_header: header of the csv file. - :param fetch_size: number of result rows to write into the csv file. + :type lineterminator: str + :param output_header: header of the csv file, default to True. + :type output_header: bool + :param fetch_size: number of result rows to write into the csv file, default to 1000. + :type fetch_size: int :param hive_conf: hive_conf to execute alone with the hql. 
- :return: + :type hive_conf: dict + """ results_iter = self._get_results(hql, schema, @@ -907,6 +925,15 @@ def get_records(self, hql, schema='default', hive_conf=None): """ Get a set of records from a Hive query. + :param hql: hql to be executed. + :type hql: str or list + :param schema: target schema, default to 'default'. + :type schema: str + :param hive_conf: hive_conf to execute alone with the hql. + :type hive_conf: dict + :return: result of hive execution + :rtype: list + >>> hh = HiveServer2Hook() >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100" >>> len(hh.get_records(sql)) @@ -918,6 +945,13 @@ def get_pandas_df(self, hql, schema='default'): """ Get a pandas dataframe from a Hive query + :param hql: hql to be executed. + :type hql: str or list + :param schema: target schema, default to 'default'. + :type schema: str + :return: result of hql execution + :rtype: DataFrame + >>> hh = HiveServer2Hook() >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100" >>> df = hh.get_pandas_df(sql) From 4230404abcf09909ac03d80ba7ade2d48b42cf9e Mon Sep 17 00:00:00 2001 From: "T. Tanay" Date: Thu, 7 Feb 2019 17:02:48 +0530 Subject: [PATCH 0055/1104] [AIRFLOW-3643] Add shebang to docs/start_doc_server.sh (#4650) Since this script uses bash syntax, shebang needs to be added. --- docs/start_doc_server.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/start_doc_server.sh b/docs/start_doc_server.sh index c5c8cba0f7168..da4876bed6b05 100755 --- a/docs/start_doc_server.sh +++ b/docs/start_doc_server.sh @@ -1,3 +1,5 @@ +#!/usr/bin/env bash + # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -6,9 +8,9 @@ # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. 
You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY From 13c63ffad05817bf4ed6ef948dc9672c26f8ffb6 Mon Sep 17 00:00:00 2001 From: Penumbra69 Date: Thu, 7 Feb 2019 03:37:25 -0800 Subject: [PATCH 0056/1104] [AIRFLOW-3647] Add archives config option to SparkSubmitOperator (#4467) To enable to spark behavior of transporting and extracting an archive on job launch, making the _contents_ of the archive available to the driver as well as the workers (not just the jar or archive as a zip file) - this configuration attribute is necessary. This is required if you have no ability to modify the Python env on the worker / driver nodes, but you wish to use versions, modules, or features not installed. We transport a full Python 3.5 environment to our CDH cluster using this option and the alias "#PYTHON" paired an additional configuration to spark to use it: --archives "hdfs:///user/myuser/my_python_env.zip#PYTHON" --conf "spark.yarn.appMasterEnv.PYSPARK_PYTHON=./PYTHON/python35/bin/python3" --- airflow/contrib/hooks/spark_submit_hook.py | 6 ++++++ airflow/contrib/operators/spark_submit_operator.py | 3 +++ tests/contrib/hooks/test_spark_submit_hook.py | 2 ++ tests/contrib/operators/test_spark_submit_operator.py | 3 +++ 4 files changed, 14 insertions(+) diff --git a/airflow/contrib/hooks/spark_submit_hook.py b/airflow/contrib/hooks/spark_submit_hook.py index 33fb55af06e34..dd6372803180f 100644 --- a/airflow/contrib/hooks/spark_submit_hook.py +++ b/airflow/contrib/hooks/spark_submit_hook.py @@ -45,6 +45,8 @@ class SparkSubmitHook(BaseHook, LoggingMixin): :type files: str :param py_files: Additional python files used by the job, can be .zip, .egg or .py. 
:type py_files: str + :param: archives: Archives that spark should unzip (and possibly tag with #ALIAS) into + the application working directory. :param driver_classpath: Additional, driver-specific, classpath settings. :type driver_classpath: str :param jars: Submit additional jars to upload and place them in executor classpath. @@ -94,6 +96,7 @@ def __init__(self, conn_id='spark_default', files=None, py_files=None, + archives=None, driver_classpath=None, jars=None, java_class=None, @@ -116,6 +119,7 @@ def __init__(self, self._conn_id = conn_id self._files = files self._py_files = py_files + self._archives = archives self._driver_classpath = driver_classpath self._jars = jars self._java_class = java_class @@ -244,6 +248,8 @@ def _build_spark_submit_command(self, application): connection_cmd += ["--files", self._files] if self._py_files: connection_cmd += ["--py-files", self._py_files] + if self._archives: + connection_cmd += ["--archives", self._archives] if self._driver_classpath: connection_cmd += ["--driver-classpath", self._driver_classpath] if self._jars: diff --git a/airflow/contrib/operators/spark_submit_operator.py b/airflow/contrib/operators/spark_submit_operator.py index 040b1c1423e88..2803850230d4d 100644 --- a/airflow/contrib/operators/spark_submit_operator.py +++ b/airflow/contrib/operators/spark_submit_operator.py @@ -97,6 +97,7 @@ def __init__(self, conn_id='spark_default', files=None, py_files=None, + archives=None, driver_classpath=None, jars=None, java_class=None, @@ -122,6 +123,7 @@ def __init__(self, self._conf = conf self._files = files self._py_files = py_files + self._archives = archives self._driver_classpath = driver_classpath self._jars = jars self._java_class = java_class @@ -152,6 +154,7 @@ def execute(self, context): conn_id=self._conn_id, files=self._files, py_files=self._py_files, + archives=self._archives, driver_classpath=self._driver_classpath, jars=self._jars, java_class=self._java_class, diff --git 
a/tests/contrib/hooks/test_spark_submit_hook.py b/tests/contrib/hooks/test_spark_submit_hook.py index 01c4f65de9c53..45b10d42bce5b 100644 --- a/tests/contrib/hooks/test_spark_submit_hook.py +++ b/tests/contrib/hooks/test_spark_submit_hook.py @@ -38,6 +38,7 @@ class TestSparkSubmitHook(unittest.TestCase): 'conn_id': 'default_spark', 'files': 'hive-site.xml', 'py_files': 'sample_library.py', + 'archives': 'sample_archive.zip#SAMPLE', 'jars': 'parquet.jar', 'packages': 'com.databricks:spark-avro_2.11:3.2.0', 'exclude_packages': 'org.bad.dependency:1.0.0', @@ -143,6 +144,7 @@ def test_build_spark_submit_command(self): '--conf', 'parquet.compression=SNAPPY', '--files', 'hive-site.xml', '--py-files', 'sample_library.py', + '--archives', 'sample_archive.zip#SAMPLE', '--jars', 'parquet.jar', '--packages', 'com.databricks:spark-avro_2.11:3.2.0', '--exclude-packages', 'org.bad.dependency:1.0.0', diff --git a/tests/contrib/operators/test_spark_submit_operator.py b/tests/contrib/operators/test_spark_submit_operator.py index b8b05d2ece58d..b865fda930108 100644 --- a/tests/contrib/operators/test_spark_submit_operator.py +++ b/tests/contrib/operators/test_spark_submit_operator.py @@ -40,6 +40,7 @@ class TestSparkSubmitOperator(unittest.TestCase): }, 'files': 'hive-site.xml', 'py_files': 'sample_library.py', + 'archives': 'sample_archive.zip#SAMPLE', 'driver_classpath': 'parquet.jar', 'jars': 'parquet.jar', 'packages': 'com.databricks:spark-avro_2.11:3.2.0', @@ -91,6 +92,7 @@ def test_execute(self): }, 'files': 'hive-site.xml', 'py_files': 'sample_library.py', + 'archives': 'sample_archive.zip#SAMPLE', 'driver_classpath': 'parquet.jar', 'jars': 'parquet.jar', 'packages': 'com.databricks:spark-avro_2.11:3.2.0', @@ -122,6 +124,7 @@ def test_execute(self): self.assertEqual(expected_dict['conf'], operator._conf) self.assertEqual(expected_dict['files'], operator._files) self.assertEqual(expected_dict['py_files'], operator._py_files) + self.assertEqual(expected_dict['archives'], 
operator._archives) self.assertEqual(expected_dict['driver_classpath'], operator._driver_classpath) self.assertEqual(expected_dict['jars'], operator._jars) self.assertEqual(expected_dict['packages'], operator._packages) From 6b38649fa6cdf16055c7f5458050c70f39cac8fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Thu, 7 Feb 2019 12:57:32 +0100 Subject: [PATCH 0057/1104] [AIRFLOW-XXX] Improve linking to classes (#4655) --- airflow/configuration.py | 3 +- .../hooks/azure_container_instance_hook.py | 4 +- airflow/contrib/hooks/bigquery_hook.py | 10 +- airflow/contrib/hooks/datadog_hook.py | 4 +- airflow/contrib/hooks/gcp_bigtable_hook.py | 3 +- airflow/contrib/hooks/gcp_container_hook.py | 6 +- airflow/contrib/hooks/gcp_spanner_hook.py | 8 +- airflow/contrib/hooks/gcp_sql_hook.py | 2 +- airflow/contrib/hooks/gcs_hook.py | 2 +- airflow/contrib/hooks/mongo_hook.py | 6 +- airflow/contrib/hooks/sagemaker_hook.py | 4 +- airflow/contrib/hooks/spark_submit_hook.py | 2 +- airflow/contrib/hooks/ssh_hook.py | 2 +- airflow/contrib/kubernetes/pod.py | 4 +- airflow/contrib/kubernetes/secret.py | 8 +- airflow/contrib/kubernetes/volume.py | 4 +- .../azure_container_instances_operator.py | 20 +-- .../contrib/operators/bigquery_operator.py | 2 +- .../contrib/operators/databricks_operator.py | 4 +- .../contrib/operators/dataflow_operator.py | 6 +- .../contrib/operators/dataproc_operator.py | 6 +- .../operators/gcp_bigtable_operator.py | 6 +- .../operators/gcp_container_operator.py | 2 +- .../contrib/operators/gcp_spanner_operator.py | 2 +- airflow/contrib/operators/gcp_sql_operator.py | 4 +- airflow/contrib/operators/gcs_to_bq.py | 4 +- airflow/contrib/operators/gcs_to_gcs.py | 2 +- .../operators/kubernetes_pod_operator.py | 14 +- .../contrib/operators/mlengine_operator.py | 2 +- .../operators/mlengine_operator_utils.py | 6 +- .../operators/qubole_check_operator.py | 4 +- airflow/contrib/operators/sftp_operator.py | 2 +- airflow/contrib/operators/ssh_operator.py | 
2 +- airflow/contrib/operators/winrm_operator.py | 2 +- airflow/contrib/sensors/weekday_sensor.py | 2 +- airflow/contrib/utils/gcp_field_sanitizer.py | 2 +- airflow/contrib/utils/gcp_field_validator.py | 2 +- airflow/dag/base_dag.py | 2 +- airflow/exceptions.py | 2 +- airflow/executors/celery_executor.py | 4 +- airflow/executors/local_executor.py | 6 +- airflow/hooks/hdfs_hook.py | 2 +- airflow/hooks/hive_hooks.py | 12 +- airflow/jobs.py | 90 +++++------ airflow/macros/hive.py | 4 +- airflow/models/__init__.py | 48 +++--- airflow/models/taskreschedule.py | 2 +- airflow/operators/docker_operator.py | 4 +- airflow/operators/generic_transfer.py | 2 +- airflow/operators/hive_to_druid.py | 2 +- airflow/operators/mysql_operator.py | 7 +- airflow/operators/oracle_operator.py | 5 +- airflow/operators/python_operator.py | 12 +- airflow/operators/subdag_operator.py | 6 +- airflow/plugins_manager.py | 4 +- airflow/sensors/hdfs_sensor.py | 12 +- .../sensors/named_hive_partition_sensor.py | 2 +- airflow/settings.py | 2 +- airflow/ti_deps/dep_context.py | 2 +- airflow/ti_deps/deps/base_ti_dep.py | 16 +- airflow/ti_deps/deps/trigger_rule_dep.py | 4 +- airflow/utils/dag_processing.py | 44 +++--- airflow/utils/log/es_task_handler.py | 2 +- airflow/utils/log/file_processor_handler.py | 2 +- airflow/utils/log/file_task_handler.py | 2 +- airflow/utils/operator_helpers.py | 2 +- airflow/www/security.py | 17 ++- docs/code.rst | 2 + docs/concepts.rst | 21 ++- docs/conf.py | 10 +- docs/howto/executor/use-dask.rst | 4 +- docs/howto/executor/use-mesos.rst | 4 +- docs/installation.rst | 142 +++++++++--------- docs/scheduler.rst | 6 +- docs/start.rst | 2 +- 75 files changed, 355 insertions(+), 325 deletions(-) diff --git a/airflow/configuration.py b/airflow/configuration.py index 1de5c3aeb1354..0de6bb9947e2a 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -313,8 +313,9 @@ def getsection(self, section): """ Returns the section as a dict. 
Values are converted to int, float, bool as required. + :param section: section from the config - :return: dict + :rtype: dict """ if (section not in self._sections and section not in self.airflow_defaults._sections): diff --git a/airflow/contrib/hooks/azure_container_instance_hook.py b/airflow/contrib/hooks/azure_container_instance_hook.py index 95386d55ffcbb..d48211e26d124 100644 --- a/airflow/contrib/hooks/azure_container_instance_hook.py +++ b/airflow/contrib/hooks/azure_container_instance_hook.py @@ -124,7 +124,7 @@ def get_messages(self, resource_group, name): :param name: the name of the container group :type name: str :return: A list of the event messages - :rtype: list + :rtype: list[str] """ instance_view = self._get_instance_view(resource_group, name) @@ -141,7 +141,7 @@ def get_logs(self, resource_group, name, tail=1000): :param tail: the size of the tail :type tail: int :return: A list of log messages - :rtype: list + :rtype: list[str] """ logs = self.connection.container.list_logs(resource_group, name, name, tail=tail) return logs.content.splitlines(True) diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py index 9ec7e876afae7..b7dffd0a98974 100644 --- a/airflow/contrib/hooks/bigquery_hook.py +++ b/airflow/contrib/hooks/bigquery_hook.py @@ -681,10 +681,10 @@ def run_query(self, :param create_disposition: Specifies whether the job is allowed to create new tables. 
:type create_disposition: str - :param query_params a dictionary containing query parameter types and + :param query_params: a dictionary containing query parameter types and values, passed to BigQuery :type query_params: dict - :param labels a dictionary containing labels for the job/query, + :param labels: a dictionary containing labels for the job/query, passed to BigQuery :type labels: dict :param schema_update_options: Allows the schema of the destination @@ -700,7 +700,7 @@ def run_query(self, :param cluster_fields: Request that the result of this query be stored sorted by one or more columns. This is only available in combination with time_partitioning. The order of columns given determines the sort order. - :type cluster_fields: list of str + :type cluster_fields: list[str] :param location: The geographic location of the job. Required except for US and EU. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location @@ -932,7 +932,7 @@ def run_copy(self, :type write_disposition: str :param create_disposition: The create disposition if the table doesn't exist. :type create_disposition: str - :param labels a dictionary containing labels for the job/query, + :param labels: a dictionary containing labels for the job/query, passed to BigQuery :type labels: dict """ @@ -1064,7 +1064,7 @@ def run_load(self, :param cluster_fields: Request that the result of this load be stored sorted by one or more columns. This is only available in combination with time_partitioning. The order of columns given determines the sort order. 
- :type cluster_fields: list of str + :type cluster_fields: list[str] """ # bigquery only allows certain source formats diff --git a/airflow/contrib/hooks/datadog_hook.py b/airflow/contrib/hooks/datadog_hook.py index 45aa09a094a5f..50209dd112e06 100644 --- a/airflow/contrib/hooks/datadog_hook.py +++ b/airflow/contrib/hooks/datadog_hook.py @@ -135,8 +135,8 @@ def post_event(self, title, text, aggregation_key=None, alert_type=None, date_ha :type priority: str :param related_event_id: Post event as a child of the given event :type related_event_id: id - :param tags: List of string tags to apply to the event - :type tags: list + :param tags: List of tags to apply to the event + :type tags: list[str] :param device_name: device_name to post the event with :type device_name: list """ diff --git a/airflow/contrib/hooks/gcp_bigtable_hook.py b/airflow/contrib/hooks/gcp_bigtable_hook.py index aeca2b475f566..d50a95deb46c7 100644 --- a/airflow/contrib/hooks/gcp_bigtable_hook.py +++ b/airflow/contrib/hooks/gcp_bigtable_hook.py @@ -185,7 +185,8 @@ def create_table(instance, initially split the table. :type column_families: dict :param column_families: (Optional) A map of columns to create. The key is the - column_id str, and the value is a ``GarbageCollectionRule``. + column_id str, and the value is a + :class:`google.cloud.bigtable.column_family.GarbageCollectionRule`. 
""" if column_families is None: column_families = {} diff --git a/airflow/contrib/hooks/gcp_container_hook.py b/airflow/contrib/hooks/gcp_container_hook.py index 30096c14f4d7c..68bf1785b6b0e 100644 --- a/airflow/contrib/hooks/gcp_container_hook.py +++ b/airflow/contrib/hooks/gcp_container_hook.py @@ -75,7 +75,7 @@ def wait_for_operation(self, operation, project_id=None): completion or an error occurring :param operation: The Operation to wait for - :type operation: A google.cloud.container_V1.gapic.enums.Operator + :type operation: google.cloud.container_V1.gapic.enums.Operation :param project_id: Google Cloud Platform project ID :type project_id: str :return: A new, updated operation fetched from Google Cloud @@ -173,7 +173,7 @@ def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAUL :param cluster: A Cluster protobuf or dict. If dict is provided, it must be of the same form as the protobuf message - ``google.cloud.container_v1.types.Cluster`` + :class:`google.cloud.container_v1.types.Cluster` :type cluster: dict or google.cloud.container_v1.types.Cluster :param project_id: Google Cloud Platform project ID :type project_id: str @@ -232,7 +232,7 @@ def get_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT): complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float - :return: A google.cloud.container_v1.types.Cluster instance + :return: google.cloud.container_v1.types.Cluster """ self.log.info("Fetching cluster (project_id={}, zone={}, cluster_name={})".format( project_id or self.project_id, diff --git a/airflow/contrib/hooks/gcp_spanner_hook.py b/airflow/contrib/hooks/gcp_spanner_hook.py index 66e9fc52dbad0..6b442c46989f5 100644 --- a/airflow/contrib/hooks/gcp_spanner_hook.py +++ b/airflow/contrib/hooks/gcp_spanner_hook.py @@ -44,8 +44,7 @@ def _get_client(self, project_id): :param project_id: The ID of the GCP project. 
:type project_id: str - :return: Client for interacting with the Cloud Spanner API. See: - https://googleapis.github.io/google-cloud-python/latest/spanner/client-api.html#google.cloud.spanner_v1.client.Client + :return: google.cloud.spanner_v1.client.Client :rtype: object """ if not self._client: @@ -62,8 +61,7 @@ def get_instance(self, instance_id, project_id=None): :type project_id: str :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str - :return: Representation of a Cloud Spanner Instance. See: - https://googleapis.github.io/google-cloud-python/latest/spanner/instance-api.html#google.cloud.spanner_v1.instance.Instance + :return: google.cloud.spanner_v1.instance.Instance :rtype: object """ instance = self._get_client(project_id=project_id).instance(instance_id=instance_id) @@ -196,7 +194,7 @@ def get_database(self, instance_id, database_id, project_id=None): database. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: Database object or None if database does not exist - :rtype: Union[Database, None] + :rtype: google.cloud.spanner_v1.database.Database or None """ instance = self._get_client(project_id=project_id).instance( diff --git a/airflow/contrib/hooks/gcp_sql_hook.py b/airflow/contrib/hooks/gcp_sql_hook.py index caa3454ccad81..7b386e663e12a 100644 --- a/airflow/contrib/hooks/gcp_sql_hook.py +++ b/airflow/contrib/hooks/gcp_sql_hook.py @@ -413,7 +413,7 @@ def __init__(self, :type instance_specification: str :param gcp_conn_id: Id of Google Cloud Platform connection to use for authentication - :type: str + :type gcp_conn_id: str :param project_id: Optional id of the GCP project to connect to - it overwrites default project id taken from the GCP connection. 
:type project_id: str diff --git a/airflow/contrib/hooks/gcs_hook.py b/airflow/contrib/hooks/gcs_hook.py index 0b6e76803571f..6158e7a35a826 100644 --- a/airflow/contrib/hooks/gcs_hook.py +++ b/airflow/contrib/hooks/gcs_hook.py @@ -284,7 +284,7 @@ def is_updated_after(self, bucket, object, ts): storage bucket. :type object: str :param ts: The timestamp to check against. - :type ts: datetime + :type ts: datetime.datetime """ service = self.get_conn() try: diff --git a/airflow/contrib/hooks/mongo_hook.py b/airflow/contrib/hooks/mongo_hook.py index c90326f760c93..3a96bb508905a 100644 --- a/airflow/contrib/hooks/mongo_hook.py +++ b/airflow/contrib/hooks/mongo_hook.py @@ -222,10 +222,10 @@ def replace_many(self, mongo_collection, docs, :param mongo_collection: The name of the collection to update. :type mongo_collection: str :param docs: The new documents. - :type docs: list(dict) + :type docs: list[dict] :param filter_docs: A list of queries that match the documents to replace. Can be omitted; then the _id fields from docs will be used. - :type filter_docs: list(dict) + :type filter_docs: list[dict] :param mongo_db: The name of the database to use. Can be omitted; then the database from the connection string is used. :type mongo_db: str @@ -235,7 +235,7 @@ def replace_many(self, mongo_collection, docs, :param collation: An instance of :class:`~pymongo.collation.Collation`. This option is only supported on MongoDB 3.4 and above. 
- :type collation: :class:`~pymongo.collation.Collation` + :type collation: pymongo.collation.Collation """ collection = self.get_collection(mongo_collection, mongo_db=mongo_db) diff --git a/airflow/contrib/hooks/sagemaker_hook.py b/airflow/contrib/hooks/sagemaker_hook.py index 8ff478649de1a..80777821fef39 100644 --- a/airflow/contrib/hooks/sagemaker_hook.py +++ b/airflow/contrib/hooks/sagemaker_hook.py @@ -234,7 +234,7 @@ def get_log_conn(self): """ Establish an AWS connection for retrieving logs during training - :rtype: :py:class:`CloudWatchLog.Client` + :rtype: CloudWatchLogs.Client """ config = botocore.config.Config(retries={'max_attempts': 15}) return self.get_client_type('logs', config=config) @@ -373,7 +373,7 @@ def create_tuning_job(self, config, wait_for_completion=True, :param config: the config for tuning :type config: dict :param wait_for_completion: if the program should keep running until job finishes - :param wait_for_completion: bool + :type wait_for_completion: bool :param check_interval: the time interval in seconds which the operator will check the status of any SageMaker job :type check_interval: int diff --git a/airflow/contrib/hooks/spark_submit_hook.py b/airflow/contrib/hooks/spark_submit_hook.py index dd6372803180f..0912fb40a5138 100644 --- a/airflow/contrib/hooks/spark_submit_hook.py +++ b/airflow/contrib/hooks/spark_submit_hook.py @@ -89,7 +89,7 @@ class SparkSubmitHook(BaseHook, LoggingMixin): :type verbose: bool :param spark_binary: The command to use for spark submit. Some distros may use spark2-submit. - :type spark_binary: string + :type spark_binary: str """ def __init__(self, conf=None, diff --git a/airflow/contrib/hooks/ssh_hook.py b/airflow/contrib/hooks/ssh_hook.py index a098234d90362..956a7fd4419d5 100755 --- a/airflow/contrib/hooks/ssh_hook.py +++ b/airflow/contrib/hooks/ssh_hook.py @@ -145,7 +145,7 @@ def get_conn(self): """ Opens a ssh connection to the remote host. 
- :return paramiko.SSHClient object + :rtype: paramiko.client.SSHClient """ self.log.debug('Creating SSH client for conn_id: %s', self.ssh_conn_id) diff --git a/airflow/contrib/kubernetes/pod.py b/airflow/contrib/kubernetes/pod.py index 6d2977592598a..96947a79a3e95 100644 --- a/airflow/contrib/kubernetes/pod.py +++ b/airflow/contrib/kubernetes/pod.py @@ -46,9 +46,9 @@ class Pod: :param envs: A dict containing the environment variables :type envs: dict :param cmds: The command to be run on the pod - :type cmds: list str + :type cmds: list[str] :param secrets: Secrets to be launched to the pod - :type secrets: list Secret + :type secrets: list[airflow.contrib.kubernetes.secret.Secret] :param result: The result that will be returned to the operator after successful execution of the pod :type result: any diff --git a/airflow/contrib/kubernetes/secret.py b/airflow/contrib/kubernetes/secret.py index 5c1038cd84e9a..bf1526b1a393b 100644 --- a/airflow/contrib/kubernetes/secret.py +++ b/airflow/contrib/kubernetes/secret.py @@ -24,14 +24,14 @@ def __init__(self, deploy_type, deploy_target, secret, key): the user. 
:param deploy_type: The type of secret deploy in Kubernetes, either `env` or `volume` - :type deploy_type: ``str`` + :type deploy_type: str :param deploy_target: The environment variable when `deploy_type` `env` or file path when `deploy_type` `volume` where expose secret - :type deploy_target: ``str`` + :type deploy_target: str :param secret: Name of the secrets object in Kubernetes - :type secret: ``str`` + :type secret: str :param key: Key of the secret within the Kubernetes Secret - :type key: ``str`` + :type key: str """ self.deploy_type = deploy_type self.deploy_target = deploy_target.upper() diff --git a/airflow/contrib/kubernetes/volume.py b/airflow/contrib/kubernetes/volume.py index d5b4f60cc3850..94003fe48dcb3 100644 --- a/airflow/contrib/kubernetes/volume.py +++ b/airflow/contrib/kubernetes/volume.py @@ -23,11 +23,11 @@ def __init__(self, name, configs): """ Adds Kubernetes Volume to pod. allows pod to access features like ConfigMaps and Persistent Volumes :param name: the name of the volume mount - :type: name: str + :type name: str :param configs: dictionary of any features needed for volume. We purposely keep this vague since there are multiple volume types with changing configs. 
- :type: configs: dict + :type configs: dict """ self.name = name self.configs = configs diff --git a/airflow/contrib/operators/azure_container_instances_operator.py b/airflow/contrib/operators/azure_container_instances_operator.py index 896891b4d8400..7ec6ca6ab8d5e 100644 --- a/airflow/contrib/operators/azure_container_instances_operator.py +++ b/airflow/contrib/operators/azure_container_instances_operator.py @@ -62,18 +62,18 @@ class AzureContainerInstancesOperator(BaseOperator): :type image: str :param region: the region wherein this container instance should be started :type region: str - :param: environment_variables: key,value pairs containing environment + :param environment_variables: key,value pairs containing environment variables which will be passed to the running container - :type: environment_variables: dict - :param: volumes: list of volumes to be mounted to the container. + :type environment_variables: dict + :param volumes: list of volumes to be mounted to the container. Currently only Azure Fileshares are supported. 
- :type: volumes: list[] - :param: memory_in_gb: the amount of memory to allocate to this container - :type: memory_in_gb: double - :param: cpu: the number of cpus to allocate to this container - :type: cpu: double - :param: command: the command to run inside the container - :type: command: str + :type volumes: list[] + :param memory_in_gb: the amount of memory to allocate to this container + :type memory_in_gb: double + :param cpu: the number of cpus to allocate to this container + :type cpu: double + :param command: the command to run inside the container + :type command: str :Example: diff --git a/airflow/contrib/operators/bigquery_operator.py b/airflow/contrib/operators/bigquery_operator.py index f597db93a58a3..10e3f840a8e31 100644 --- a/airflow/contrib/operators/bigquery_operator.py +++ b/airflow/contrib/operators/bigquery_operator.py @@ -96,7 +96,7 @@ class BigQueryOperator(BaseOperator): :param cluster_fields: Request that the result of this query be stored sorted by one or more columns. This is only available in conjunction with time_partitioning. The order of columns given determines the sort order. - :type cluster_fields: list of str + :type cluster_fields: list[str] :param location: The geographic location of the job. Required except for US and EU. See details at https://cloud.google.com/bigquery/docs/locations#specifying_your_location diff --git a/airflow/contrib/operators/databricks_operator.py b/airflow/contrib/operators/databricks_operator.py index a8742e401c4fe..df3f903725ef6 100644 --- a/airflow/contrib/operators/databricks_operator.py +++ b/airflow/contrib/operators/databricks_operator.py @@ -390,7 +390,7 @@ class DatabricksRunNowOperator(BaseOperator): .. seealso:: https://docs.databricks.com/api/latest/jobs.html#run-now - :type python_params: array of strings + :type python_params: list[str] :param spark_submit_params: A list of parameters for jobs with spark submit task, e.g. 
"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]. The parameters will be passed to spark-submit script as command line parameters. @@ -401,7 +401,7 @@ class DatabricksRunNowOperator(BaseOperator): .. seealso:: https://docs.databricks.com/api/latest/jobs.html#run-now - :type spark_submit_params: array of strings + :type spark_submit_params: list[str] :param timeout_seconds: The timeout for this run. By default a value of 0 is used which means to have no timeout. This field will be templated. diff --git a/airflow/contrib/operators/dataflow_operator.py b/airflow/contrib/operators/dataflow_operator.py index 07ad56ba32e82..536248c826eda 100644 --- a/airflow/contrib/operators/dataflow_operator.py +++ b/airflow/contrib/operators/dataflow_operator.py @@ -279,8 +279,8 @@ class DataFlowPythonOperator(BaseOperator): (templated). This ends up being set in the pipeline options, so any entry with key ``'jobName'`` or ``'job_name'`` in ``options`` will be overwritten. :type job_name: str - :param py_options: Additional python options. - :type pyt_options: list of strings, e.g., ["-m", "-v"]. + :param py_options: Additional python options, e.g., ["-m", "-v"]. + :type py_options: list[str] :param dataflow_default_options: Map of default job options. :type dataflow_default_options: dict :param options: Map of job specific options. @@ -365,7 +365,7 @@ def google_cloud_to_local(self, file_name): :param file_name: The full path of input file. :type file_name: str :return: The full path of local file. 
- :type: str + :rtype: str """ if not file_name.startswith('gs://'): return file_name diff --git a/airflow/contrib/operators/dataproc_operator.py b/airflow/contrib/operators/dataproc_operator.py index 7253603b9b35e..8ff26969e32b5 100644 --- a/airflow/contrib/operators/dataproc_operator.py +++ b/airflow/contrib/operators/dataproc_operator.py @@ -60,7 +60,7 @@ class DataprocClusterCreateOperator(BaseOperator): :type storage_bucket: str :param init_actions_uris: List of GCS uri's containing dataproc initialization scripts - :type init_actions_uris: list[string] + :type init_actions_uris: list[str] :param init_action_timeout: Amount of time executable scripts in init_actions_uris has to complete :type init_action_timeout: str @@ -111,7 +111,7 @@ class DataprocClusterCreateOperator(BaseOperator): enabled networks :type internal_ip_only: bool :param tags: The GCE tags to add to all instances - :type tags: list[string] + :type tags: list[str] :param region: leave as 'global', might become relevant in the future. (templated) :type region: str :param gcp_conn_id: The connection ID to use connecting to Google Cloud Platform. @@ -123,7 +123,7 @@ class DataprocClusterCreateOperator(BaseOperator): :param service_account: The service account of the dataproc instances. :type service_account: str :param service_account_scopes: The URIs of service account scopes to be included. - :type service_account_scopes: list[string] + :type service_account_scopes: list[str] :param idle_delete_ttl: The longest duration that cluster would keep alive while staying idle. Passing this threshold will cause cluster to be auto-deleted. A duration in seconds. 
diff --git a/airflow/contrib/operators/gcp_bigtable_operator.py b/airflow/contrib/operators/gcp_bigtable_operator.py index c59ee5561d259..e100af12ad748 100644 --- a/airflow/contrib/operators/gcp_bigtable_operator.py +++ b/airflow/contrib/operators/gcp_bigtable_operator.py @@ -65,7 +65,7 @@ class BigtableInstanceCreateOperator(BaseOperator, BigtableValidationMixin): :param replica_cluster_id: (optional) The ID for replica cluster for the new instance. :type replica_cluster_zone: str :param replica_cluster_zone: (optional) The zone for replica cluster. - :type instance_type: IntEnum + :type instance_type: enums.IntEnum :param instance_type: (optional) The type of the instance. :type instance_display_name: str :param instance_display_name: (optional) Human-readable name of the instance. Defaults @@ -75,7 +75,7 @@ class BigtableInstanceCreateOperator(BaseOperator, BigtableValidationMixin): with the instance. :type cluster_nodes: int :param cluster_nodes: (optional) Number of nodes for cluster. - :type cluster_storage_type: IntEnum + :type cluster_storage_type: enums.IntEnum :param cluster_storage_type: (optional) The type of storage. :type timeout: int :param timeout: (optional) timeout (in seconds) for instance creation. @@ -213,7 +213,7 @@ class BigtableTableCreateOperator(BaseOperator, BigtableValidationMixin): :type column_families: dict :param column_families: (Optional) A map columns to create. 
The key is the column_id str and the value is a - GarbageCollectionRule + :class:`google.cloud.bigtable.column_family.GarbageCollectionRule` """ REQUIRED_ATTRIBUTES = ('instance_id', 'table_id') template_fields = ['project_id', 'instance_id', 'table_id'] diff --git a/airflow/contrib/operators/gcp_container_operator.py b/airflow/contrib/operators/gcp_container_operator.py index fda4d44b9dca9..90199c7591a4e 100644 --- a/airflow/contrib/operators/gcp_container_operator.py +++ b/airflow/contrib/operators/gcp_container_operator.py @@ -119,7 +119,7 @@ class GKEClusterCreateOperator(BaseOperator): .. seealso:: For more detail on about creating clusters have a look at the reference: - https://google-cloud-python.readthedocs.io/en/latest/container/gapic/v1/types.html#google.cloud.container_v1.types.Cluster + :class:`google.cloud.container_v1.types.Cluster` :param project_id: The Google Developers Console [project ID or project number] :type project_id: str diff --git a/airflow/contrib/operators/gcp_spanner_operator.py b/airflow/contrib/operators/gcp_spanner_operator.py index a85e1aeb76020..f78fcbde3211c 100644 --- a/airflow/contrib/operators/gcp_spanner_operator.py +++ b/airflow/contrib/operators/gcp_spanner_operator.py @@ -221,7 +221,7 @@ class CloudSpannerInstanceDatabaseDeployOperator(BaseOperator): :param database_id: The Cloud Spanner database ID. :type database_id: str :param ddl_statements: The string list containing DDL for the new database. - :type ddl_statements: list of str + :type ddl_statements: list[str] :param project_id: Optional, the ID of the project that owns the Cloud Spanner Database. If set to None or missing, the default project_id from the GCP connection is used. 
:type project_id: str diff --git a/airflow/contrib/operators/gcp_sql_operator.py b/airflow/contrib/operators/gcp_sql_operator.py index 5a7f348984ec0..ece91cf40456f 100644 --- a/airflow/contrib/operators/gcp_sql_operator.py +++ b/airflow/contrib/operators/gcp_sql_operator.py @@ -697,7 +697,7 @@ class CloudSqlQueryOperator(BaseOperator): so it is useless to pass it DQL queries. Note that it is responsibility of the author of the queries to make sure that the queries are idempotent. For example you can use CREATE TABLE IF NOT EXISTS to create a table. - :type sql: str or [str] + :type sql: str or list[str] :param parameters: (optional) the parameters to render the SQL query with. :type parameters: mapping or iterable :param autocommit: if True, each command is automatically committed. @@ -708,7 +708,7 @@ class CloudSqlQueryOperator(BaseOperator): :type gcp_conn_id: str :param gcp_cloudsql_conn_id: The connection ID used to connect to Google Cloud SQL its schema should be gcpcloudsql://. - See :class:`~airflow.contrib.hooks.gcp_sql_hooks.CloudSqlDatabaseHook` for + See :class:`~airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook` for details on how to define gcpcloudsql:// connection. :type gcp_cloudsql_conn_id: str """ diff --git a/airflow/contrib/operators/gcs_to_bq.py b/airflow/contrib/operators/gcs_to_bq.py index abbf380b2d0fb..c5337ce34cb8a 100644 --- a/airflow/contrib/operators/gcs_to_bq.py +++ b/airflow/contrib/operators/gcs_to_bq.py @@ -38,7 +38,7 @@ class GoogleCloudStorageToBigQueryOperator(BaseOperator): :type bucket: str :param source_objects: List of Google cloud storage URIs to load from. (templated) If source_format is 'DATASTORE_BACKUP', the list must only contain a single URI. - :type source_objects: list of str + :type source_objects: list[str] :param destination_project_dataset_table: The dotted (.).

BigQuery table to load data into. If is not included, project will be the project defined in the connection json. (templated) @@ -118,7 +118,7 @@ class GoogleCloudStorageToBigQueryOperator(BaseOperator): by one or more columns. This is only available in conjunction with time_partitioning. The order of columns given determines the sort order. Not applicable for external tables. - :type cluster_fields: list of str + :type cluster_fields: list[str] """ template_fields = ('bucket', 'source_objects', 'schema_object', 'destination_project_dataset_table') diff --git a/airflow/contrib/operators/gcs_to_gcs.py b/airflow/contrib/operators/gcs_to_gcs.py index 3a7c83d5a52ae..b694b802fc878 100644 --- a/airflow/contrib/operators/gcs_to_gcs.py +++ b/airflow/contrib/operators/gcs_to_gcs.py @@ -64,7 +64,7 @@ class GoogleCloudStorageToGoogleCloudStorageOperator(BaseOperator): :param last_modified_time: When specified, if the object(s) were modified after last_modified_time, they will be copied/moved. If tzinfo has not been set, UTC will be assumed. - :type last_modified_time: datetime + :type last_modified_time: datetime.datetime :Example: diff --git a/airflow/contrib/operators/kubernetes_pod_operator.py b/airflow/contrib/operators/kubernetes_pod_operator.py index 3993b9392199c..25b734da9b20a 100644 --- a/airflow/contrib/operators/kubernetes_pod_operator.py +++ b/airflow/contrib/operators/kubernetes_pod_operator.py @@ -37,14 +37,14 @@ class KubernetesPodOperator(BaseOperator): :param image: Docker image you wish to launch. Defaults to dockerhub.io, but fully qualified URLS will point to custom repositories :type image: str - :param: namespace: the namespace to run within kubernetes - :type: namespace: str + :param namespace: the namespace to run within kubernetes + :type namespace: str :param cmds: entrypoint of the container. (templated) The docker images's entrypoint is used if this is not provide. 
- :type cmds: list of str + :type cmds: list[str] :param arguments: arguments of the entrypoint. (templated) The docker image's CMD is used if this is not provided. - :type arguments: list of str + :type arguments: list[str] :param image_pull_policy: Specify a policy to cache or always pull an image :type image_pull_policy: str :param image_pull_secrets: Any image pull secrets to be given to the pod. @@ -52,9 +52,9 @@ class KubernetesPodOperator(BaseOperator): comma separated list: secret_a,secret_b :type image_pull_secrets: str :param volume_mounts: volumeMounts for launched pod - :type volume_mounts: list of VolumeMount + :type volume_mounts: list[airflow.contrib.kubernetes.volume_mount.VolumeMount] :param volumes: volumes for launched pod. Includes ConfigMaps and PersistentVolumes - :type volumes: list of Volume + :type volumes: list[airflow.contrib.kubernetes.volume.Volume] :param labels: labels to apply to the Pod :type labels: dict :param startup_timeout_seconds: timeout in seconds to startup the pod @@ -66,7 +66,7 @@ class KubernetesPodOperator(BaseOperator): :type env_vars: dict :param secrets: Kubernetes secrets to inject in the container, They can be exposed as environment vars or files in a volume. - :type secrets: list of Secret + :type secrets: list[airflow.contrib.kubernetes.secret.Secret] :param in_cluster: run kubernetes client with in_cluster configuration :type in_cluster: bool :param cluster_context: context that points to kubernetes cluster. diff --git a/airflow/contrib/operators/mlengine_operator.py b/airflow/contrib/operators/mlengine_operator.py index a43d369d44730..3e2f68c7420fe 100644 --- a/airflow/contrib/operators/mlengine_operator.py +++ b/airflow/contrib/operators/mlengine_operator.py @@ -106,7 +106,7 @@ class MLEngineBatchPredictionOperator(BaseOperator): :param input_paths: A list of GCS paths of input data for batch prediction. Accepting wildcard operator ``*``, but only at the end. 
(templated) - :type input_paths: list of string + :type input_paths: list[str] :param output_path: The GCS path where the prediction results are written to. (templated) diff --git a/airflow/contrib/operators/mlengine_operator_utils.py b/airflow/contrib/operators/mlengine_operator_utils.py index c2951b715d337..4a8fa0589def7 100644 --- a/airflow/contrib/operators/mlengine_operator_utils.py +++ b/airflow/contrib/operators/mlengine_operator_utils.py @@ -114,7 +114,7 @@ def validate_err_and_count(summary): :type data_format: str :param input_paths: a list of input paths to be sent to BatchPrediction. - :type input_paths: list of strings + :type input_paths: list[str] :param prediction_path: GCS path to put the prediction results in. :type prediction_path: str @@ -123,7 +123,7 @@ def validate_err_and_count(summary): - metric_fn is a function that accepts a dictionary (for an instance), and returns a tuple of metric(s) that it calculates. - metric_keys is a list of strings to denote the key of each metric. - :type metric_fn_and_keys: tuple of a function and a list of strings + :type metric_fn_and_keys: tuple of a function and a list[str] :param validate_fn: a function to validate whether the averaged metric(s) is good enough to push the model. @@ -167,7 +167,7 @@ def validate_err_and_count(summary): :type version_name: str :param dag: The `DAG` to use for all Operators. 
- :type dag: airflow.DAG + :type dag: airflow.models.DAG :returns: a tuple of three operators, (prediction, summary, validation) :rtype: tuple(DataFlowPythonOperator, DataFlowPythonOperator, diff --git a/airflow/contrib/operators/qubole_check_operator.py b/airflow/contrib/operators/qubole_check_operator.py index 142b07e79eb04..73e3a7b15f661 100644 --- a/airflow/contrib/operators/qubole_check_operator.py +++ b/airflow/contrib/operators/qubole_check_operator.py @@ -129,13 +129,13 @@ class QuboleValueCheckOperator(ValueCheckOperator, QuboleOperator): :type qubole_conn_id: str :param pass_value: Expected value of the query results. - :type pass_value: str/int/float + :type pass_value: str or int or float :param tolerance: Defines the permissible pass_value range, for example if tolerance is 2, the Qubole command output can be anything between -2*pass_value and 2*pass_value, without the operator erring out. - :type tolerance: int/float + :type tolerance: int or float kwargs: diff --git a/airflow/contrib/operators/sftp_operator.py b/airflow/contrib/operators/sftp_operator.py index 74e86398db3f2..4af5fe9a59979 100644 --- a/airflow/contrib/operators/sftp_operator.py +++ b/airflow/contrib/operators/sftp_operator.py @@ -37,7 +37,7 @@ class SFTPOperator(BaseOperator): :param ssh_hook: predefined ssh_hook to use for remote execution. Either `ssh_hook` or `ssh_conn_id` needs to be provided. - :type ssh_hook: :class:`SSHHook` + :type ssh_hook: airflow.contrib.hooks.ssh_hook.SSHHook :param ssh_conn_id: connection id from airflow Connections. `ssh_conn_id` will be ignored if `ssh_hook` is provided. :type ssh_conn_id: str diff --git a/airflow/contrib/operators/ssh_operator.py b/airflow/contrib/operators/ssh_operator.py index 00462ad3d6889..5686792b35d62 100644 --- a/airflow/contrib/operators/ssh_operator.py +++ b/airflow/contrib/operators/ssh_operator.py @@ -33,7 +33,7 @@ class SSHOperator(BaseOperator): :param ssh_hook: predefined ssh_hook to use for remote execution. 
Either `ssh_hook` or `ssh_conn_id` needs to be provided. - :type ssh_hook: :class:`SSHHook` + :type ssh_hook: airflow.contrib.hooks.ssh_hook.SSHHook :param ssh_conn_id: connection id from airflow Connections. `ssh_conn_id` will be ignored if `ssh_hook` is provided. :type ssh_conn_id: str diff --git a/airflow/contrib/operators/winrm_operator.py b/airflow/contrib/operators/winrm_operator.py index c3f96693f0e4d..dc7167c54a236 100644 --- a/airflow/contrib/operators/winrm_operator.py +++ b/airflow/contrib/operators/winrm_operator.py @@ -39,7 +39,7 @@ class WinRMOperator(BaseOperator): WinRMOperator to execute commands on given remote host using the winrm_hook. :param winrm_hook: predefined ssh_hook to use for remote execution - :type winrm_hook: :class:`WinRMHook` + :type winrm_hook: airflow.contrib.hooks.winrm_hook.WinRMHook :param ssh_conn_id: connection id from airflow Connections :type ssh_conn_id: str :param remote_host: remote host to connect diff --git a/airflow/contrib/sensors/weekday_sensor.py b/airflow/contrib/sensors/weekday_sensor.py index e0357c5b2f496..8d99e11fb459f 100644 --- a/airflow/contrib/sensors/weekday_sensor.py +++ b/airflow/contrib/sensors/weekday_sensor.py @@ -66,7 +66,7 @@ class DayOfWeekSensor(BaseSensorOperator): * ``{WeekDay.TUESDAY}`` * ``{WeekDay.SATURDAY, WeekDay.SUNDAY}`` - :type week_day: set or str or WeekDay + :type week_day: set or str or airflow.contrib.utils.weekday.WeekDay :param use_task_execution_day: If ``True``, uses task's execution day to compare with week_day. Execution Date is Useful for backfilling. If ``False``, uses system's day of the week. Useful when you diff --git a/airflow/contrib/utils/gcp_field_sanitizer.py b/airflow/contrib/utils/gcp_field_sanitizer.py index c0a8985281e1a..bf9f9348f56a7 100644 --- a/airflow/contrib/utils/gcp_field_sanitizer.py +++ b/airflow/contrib/utils/gcp_field_sanitizer.py @@ -114,7 +114,7 @@ class GcpBodyFieldSanitizer(LoggingMixin): """Sanitizes the body according to specification. 
:param sanitize_specs: array of strings that specifies which fields to remove - :type sanitize_specs: [string] + :type sanitize_specs: list[str] """ def __init__(self, sanitize_specs): diff --git a/airflow/contrib/utils/gcp_field_validator.py b/airflow/contrib/utils/gcp_field_validator.py index 5ae1bdf9ce51e..73e788701cab5 100644 --- a/airflow/contrib/utils/gcp_field_validator.py +++ b/airflow/contrib/utils/gcp_field_validator.py @@ -190,7 +190,7 @@ class GcpBodyFieldValidator(LoggingMixin): for some examples and explanations of how to create specification. :param validation_specs: dictionary describing validation specification - :type validation_specs: [dict] + :type validation_specs: list[dict] :param api_version: Version of the api used (for example v1) :type api_version: str diff --git a/airflow/dag/base_dag.py b/airflow/dag/base_dag.py index 5719f572e9b4c..0e65775d41424 100644 --- a/airflow/dag/base_dag.py +++ b/airflow/dag/base_dag.py @@ -96,6 +96,6 @@ def dag_ids(self): def get_dag(self, dag_id): """ :return: whether the task exists in this bag - :rtype: BaseDag + :rtype: airflow.dag.base_dag.BaseDag """ raise NotImplementedError() diff --git a/airflow/exceptions.py b/airflow/exceptions.py index d4098c4a32435..41f0a3dda8e38 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -52,7 +52,7 @@ class AirflowRescheduleException(AirflowException): Raise when the task should be re-scheduled at a later time. :param reschedule_date: The date when the task should be rescheduled - :type reschedule: datetime + :type reschedule: datetime.datetime """ def __init__(self, reschedule_date): self.reschedule_date = reschedule_date diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py index 10694ea4b7dff..44769f62d328a 100644 --- a/airflow/executors/celery_executor.py +++ b/airflow/executors/celery_executor.py @@ -92,9 +92,9 @@ def fetch_celery_task_state(celery_task): global so that it can be called by subprocesses in the pool. 
:param celery_task: a tuple of the Celery task key and the async Celery object used to fetch the task's state - :type celery_task: (str, celery.result.AsyncResult) + :type celery_task: tuple(str, celery.result.AsyncResult) :return: a tuple of the Celery task key and the Celery state of the task - :rtype: (str, str) + :rtype: tuple[str, str] """ try: diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py index 3e622da0531f3..c9454a677f832 100644 --- a/airflow/executors/local_executor.py +++ b/airflow/executors/local_executor.py @@ -75,7 +75,7 @@ def execute_work(self, key, command): """ Executes command received and stores result state in queue. :param key: the key to identify the TI - :type key: Tuple(dag_id, task_id, execution_date) + :type key: tuple(dag_id, task_id, execution_date) :param command: the command to execute :type command: str """ @@ -144,7 +144,7 @@ def start(self): def execute_async(self, key, command): """ :param key: the key to identify the TI - :type key: Tuple(dag_id, task_id, execution_date) + :type key: tuple(dag_id, task_id, execution_date) :param command: the command to execute :type command: str """ @@ -189,7 +189,7 @@ def start(self): def execute_async(self, key, command): """ :param key: the key to identify the TI - :type key: Tuple(dag_id, task_id, execution_date) + :type key: tuple(dag_id, task_id, execution_date) :param command: the command to execute :type command: str """ diff --git a/airflow/hooks/hdfs_hook.py b/airflow/hooks/hdfs_hook.py index b00bab42ce813..597b7c4f7ec83 100644 --- a/airflow/hooks/hdfs_hook.py +++ b/airflow/hooks/hdfs_hook.py @@ -39,7 +39,7 @@ class HDFSHook(BaseHook): Interact with HDFS. This class is a wrapper around the snakebite library. 
:param hdfs_conn_id: Connection id to fetch connection info - :type conn_id: str + :type hdfs_conn_id: str :param proxy_user: effective user for HDFS operations :type proxy_user: str :param autoconfig: use snakebite's automatically configured client diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py index 68a7cfe7eb106..2efeb3408a9c9 100644 --- a/airflow/hooks/hive_hooks.py +++ b/airflow/hooks/hive_hooks.py @@ -314,13 +314,13 @@ def load_df( not be sanitized. :param df: DataFrame to load into a Hive table - :type df: DataFrame + :type df: pandas.DataFrame :param table: target Hive table, use dot notation to target a specific database :type table: str :param field_dict: mapping from column name to hive data type. Note that it must be OrderedDict so as to keep columns' order. - :type field_dict: OrderedDict + :type field_dict: collections.OrderedDict :param delimiter: field delimiter in the file :type delimiter: str :param encoding: str encoding to use when writing DataFrame to file @@ -406,7 +406,7 @@ def load_file( :param field_dict: A dictionary of the fields name in the file as keys and their Hive types as values. Note that it must be OrderedDict so as to keep columns' order. - :type field_dict: OrderedDict + :type field_dict: collections.OrderedDict :param create: whether to create the table if it doesn't exist :type create: bool :param overwrite: whether to overwrite the data in table or partition @@ -844,8 +844,8 @@ def get_results(self, hql, schema='default', fetch_size=None, hive_conf=None): :type hql: str or list :param schema: target schema, default to 'default'. :type schema: str - :param fetch_size max: size of result to fetch. - :type fetch_size_max: int + :param fetch_size: max size of result to fetch. + :type fetch_size: int :param hive_conf: hive_conf to execute alone with the hql. 
:type hive_conf: dict :return: results of hql execution, dict with data (list of results) and header @@ -957,6 +957,8 @@ def get_pandas_df(self, hql, schema='default'): >>> df = hh.get_pandas_df(sql) >>> len(df.index) 100 + + :return: pandas.DataFrame """ import pandas as pd res = self.get_results(hql, schema=schema) diff --git a/airflow/jobs.py b/airflow/jobs.py index 552e12c21f106..70b0503f62fbd 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -232,9 +232,9 @@ def reset_state_for_orphaned_tasks(self, filter_by_dag_run=None, session=None): sequence. :param filter_by_dag_run: the dag_run we want to process, None if all - :type filter_by_dag_run: models.DagRun + :type filter_by_dag_run: airflow.models.DagRun :return: the TIs reset (in expired SQLAlchemy state) - :rtype: List(TaskInstance) + :rtype: list[airflow.models.TaskInstance] """ queued_tis = self.executor.queued_tasks # also consider running as the state might not have changed in the db yet @@ -315,7 +315,7 @@ def __init__(self, file_path, pickle_dags, dag_id_white_list, zombies): :param dag_id_whitelist: If specified, only look at these DAG ID's :type dag_id_whitelist: list[unicode] :param zombies: zombie task instances to kill - :type zombies: list[SimpleTaskInstance] + :type zombies: list[airflow.utils.dag_processing.SimpleTaskInstance] """ self._file_path = file_path # Queue that's used to pass results from the child process. 
@@ -365,7 +365,7 @@ def _launch_process(result_queue, :return: the process that was launched :rtype: multiprocessing.Process :param zombies: zombie task instances to kill - :type zombies: list[SimpleTaskInstance] + :type zombies: list[airflow.utils.dag_processing.SimpleTaskInstance] """ def helper(): # This helper runs in the newly created process @@ -509,7 +509,7 @@ def done(self): def result(self): """ :return: result of running SchedulerJob.process_file() - :rtype: SimpleDag + :rtype: airflow.utils.dag_processing.SimpleDag """ if not self.done: raise AirflowException("Tried to get the result before it's done!") @@ -751,7 +751,7 @@ def update_import_errors(session, dagbag): :param session: session for ORM operations :type session: sqlalchemy.orm.session.Session :param dagbag: DagBag containing DAGs with import errors - :type dagbag: models.Dagbag + :type dagbag: airflow.models.DagBag """ # Clear the errors of the processed files for dagbag_file in dagbag.file_last_changed: @@ -975,12 +975,12 @@ def _change_state_for_tis_without_dagrun(self, changed manually. :param old_states: examine TaskInstances in this state - :type old_state: list[State] + :type old_state: list[airflow.utils.state.State] :param new_state: set TaskInstances to this state - :type new_state: State + :type new_state: airflow.utils.state.State :param simple_dag_bag: TaskInstances associated with DAGs in the simple_dag_bag and with states in the old_state will be examined - :type simple_dag_bag: SimpleDagBag + :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag """ tis_changed = 0 query = session \ @@ -1025,9 +1025,9 @@ def __get_task_concurrency_map(self, states, session=None): Returns a map from tasks to number in the states list given. 
:param states: List of states to query for - :type states: List[State] + :type states: list[airflow.utils.state.State] :return: A map from (dag_id, task_id) to count of tasks in states - :rtype: Dict[[String, String], Int] + :rtype: dict[tuple[str, str], int] """ TI = models.TaskInstance @@ -1051,12 +1051,12 @@ def _find_executable_task_instances(self, simple_dag_bag, states, session=None): :param simple_dag_bag: TaskInstances associated with DAGs in the simple_dag_bag will be fetched from the DB and executed - :type simple_dag_bag: SimpleDagBag + :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag :param executor: the executor that runs task instances :type executor: BaseExecutor :param states: Execute TaskInstances in these states - :type states: Tuple[State] - :return: List[TaskInstance] + :type states: tuple[airflow.utils.state.State] + :return: list[airflow.models.TaskInstance] """ executable_tis = [] @@ -1227,10 +1227,10 @@ def _change_state_for_executable_task_instances(self, task_instances, to QUEUED atomically, and returns the TIs changed in SimpleTaskInstance format. :param task_instances: TaskInstances to change the state of - :type task_instances: List[TaskInstance] + :type task_instances: list[airflow.models.TaskInstance] :param acceptable_states: Filters the TaskInstances updated to be in these states :type acceptable_states: Iterable[State] - :return: List[SimpleTaskInstance] + :rtype: list[airflow.utils.dag_processing.SimpleTaskInstance] """ if len(task_instances) == 0: session.commit() @@ -1292,9 +1292,9 @@ def _enqueue_task_instances_with_queued_state(self, simple_dag_bag, with the executor. 
:param simple_task_instances: TaskInstances to enqueue - :type simple_task_instances: List[SimpleTaskInstance] + :type simple_task_instances: list[SimpleTaskInstance] :param simple_dag_bag: Should contains all of the task_instances' dags - :type simple_dag_bag: SimpleDagBag + :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag """ TI = models.TaskInstance # actually enqueue them @@ -1343,9 +1343,9 @@ def _execute_task_instances(self, :param simple_dag_bag: TaskInstances associated with DAGs in the simple_dag_bag will be fetched from the DB and executed - :type simple_dag_bag: SimpleDagBag + :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag :param states: Execute TaskInstances in these states - :type states: Tuple[State] + :type states: tuple[airflow.utils.state.State] :return: Number of task instance with state changed. """ executable_tis = self._find_executable_task_instances(simple_dag_bag, states, @@ -1414,12 +1414,12 @@ def _process_dags(self, dagbag, dags, tis_out): 3. Send emails for tasks that have missed SLAs. :param dagbag: a collection of DAGs to process - :type dagbag: models.DagBag + :type dagbag: airflow.models.DagBag :param dags: the DAGs from the DagBag to process - :type dags: DAG + :type dags: airflow.models.DAG :param tis_out: A queue to add generated TaskInstance objects :type tis_out: multiprocessing.Queue[TaskInstance] - :return: None + :rtype: None """ for dag in dags: dag = dagbag.get_dag(dag.dag_id) @@ -1539,7 +1539,7 @@ def _execute_helper(self): .. image:: ../docs/img/scheduler_loop.jpg - :return: None + :rtype: None """ self.executor.start() @@ -1684,12 +1684,12 @@ def process_file(self, file_path, zombies, pickle_dags=False, session=None): :param file_path: the path to the Python file that should be executed :type file_path: unicode :param zombies: zombie task instances to kill. 
- :type zombies: list[SimpleTaskInstance] + :type zombies: list[airflow.utils.dag_processing.SimpleTaskInstance] :param pickle_dags: whether serialize the DAGs found in the file and save them to the db :type pickle_dags: bool :return: a list of SimpleDags made from the Dags found in the file - :rtype: list[SimpleDag] + :rtype: list[airflow.utils.dag_processing.SimpleDag] """ self.log.info("Processing file %s for tasks to queue", file_path) # As DAGs are parsed from this file, they will be converted into SimpleDags @@ -1829,23 +1829,23 @@ def __init__(self, ): """ :param to_run: Tasks to run in the backfill - :type to_run: dict[Tuple[String, String, DateTime], TaskInstance] + :type to_run: dict[tuple[str, str, datetime.datetime], airflow.models.TaskInstance] :param running: Maps running task instance key to task instance object - :type running: dict[Tuple[String, String, DateTime], TaskInstance] + :type running: dict[tuple[str, str, datetime.datetime], airflow.models.TaskInstance] :param skipped: Tasks that have been skipped - :type skipped: set[Tuple[String, String, DateTime]] + :type skipped: set[tuple[str, str, datetime.datetime]] :param succeeded: Tasks that have succeeded so far - :type succeeded: set[Tuple[String, String, DateTime]] + :type succeeded: set[tuple[str, str, datetime.datetime]] :param failed: Tasks that have failed - :type failed: set[Tuple[String, String, DateTime]] + :type failed: set[tuple[str, str, datetime.datetime]] :param not_ready: Tasks not ready for execution - :type not_ready: set[Tuple[String, String, DateTime]] + :type not_ready: set[tuple[str, str, datetime.datetime]] :param deadlocked: Deadlocked tasks - :type deadlocked: set[Tuple[String, String, DateTime]] + :type deadlocked: set[tuple[str, str, datetime.datetime]] :param active_runs: Active dag runs at a certain point in time :type active_runs: list[DagRun] :param executed_dag_run_dates: Datetime objects for the executed dag runs - 
:type executed_dag_run_dates: set[Datetime] + :type executed_dag_run_dates: set[datetime.datetime] :param finished_runs: Number of finished runs so far :type finished_runs: int :param total_runs: Number of total dag runs able to run @@ -1880,11 +1880,11 @@ def __init__( *args, **kwargs): """ :param dag: DAG object. - :type dag: `class DAG`. + :type dag: airflow.models.DAG :param start_date: start date for the backfill date range. - :type start_date: datetime. + :type start_date: datetime.datetime :param end_date: end date for the backfill date range. - :type end_date: datetime + :type end_date: datetime.datetime :param mark_success: flag whether to mark the task auto success. :type mark_success: bool :param donot_pickle: whether pickle @@ -2009,9 +2009,9 @@ def _get_dag_run(self, run_date, session=None): limit is reached, this function will return None. :param run_date: the execution date for the dag run - :type run_date: datetime + :type run_date: datetime.datetime :param session: the database session object - :type session: Session + :type session: sqlalchemy.orm.session.Session :return: a DagRun in state RUNNING or None """ run_id = BackfillJob.ID_FORMAT_PREFIX.format(run_date.isoformat()) @@ -2069,9 +2069,9 @@ def _task_instances_for_dag_run(self, dag_run, session=None): run in the given dag run. 
:param dag_run: the dag run to get the tasks from - :type dag_run: models.DagRun + :type dag_run: airflow.models.DagRun :param session: the database session object - :type session: Session + :type session: sqlalchemy.orm.session.Session """ tasks_to_run = {} @@ -2141,9 +2141,9 @@ def _process_backfill_task_instances(self, :param pickle_id: the pickle_id if dag is pickled, None otherwise :type pickle_id: int :param start_date: the start date of the backfill job - :type start_date: datetime + :type start_date: datetime.datetime :param session: the current session object - :type session: Session + :type session: sqlalchemy.orm.session.Session :return: the list of execution_dates for the finished dag runs :rtype: list """ @@ -2388,9 +2388,9 @@ def _execute_for_run_dates(self, run_dates, ti_status, executor, pickle_id, :param pickle_id: numeric id of the pickled dag, None if not pickled :type pickle_id: int :param start_date: backfill start date - :type start_date: datetime + :type start_date: datetime.datetime :param session: the current session object - :type session: Session + :type session: sqlalchemy.orm.session.Session """ for next_run_date in run_dates: dag_run = self._get_dag_run(next_run_date, session=session) diff --git a/airflow/macros/hive.py b/airflow/macros/hive.py index f8e57bb5156dd..914b9af2c4b1e 100644 --- a/airflow/macros/hive.py +++ b/airflow/macros/hive.py @@ -63,7 +63,7 @@ def _closest_date(target_dt, date_list, before_target=None): :param target_dt: The target date :type target_dt: datetime.date :param date_list: The list of dates to search - :type date_list: datetime.date list + :type date_list: list[datetime.date] :param before_target: closest before or after the target :type before_target: bool or None :returns: The closest date @@ -90,7 +90,7 @@ def closest_ds_partition( :param table: A hive table name :type table: str :param ds: A datestamp ``%Y-%m-%d`` e.g. 
``yyyy-mm-dd`` - :type ds: datetime.date list + :type ds: list[datetime.date] :param before: closest before (True), after (False) or either side of ds :type before: bool or None :returns: The closest date diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 0d279b659d482..e1d7eb491dd50 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -151,7 +151,7 @@ def get_fernet(): or because the Fernet key is invalid. :return: Fernet object - :raises: AirflowException if there's a problem trying to load Fernet + :raises: airflow.exceptions.AirflowException if there's a problem trying to load Fernet """ global _fernet log = LoggingMixin().log @@ -476,9 +476,9 @@ def kill_zombies(self, zombies, session=None): had a heartbeat for too long, in the current DagBag. :param zombies: zombie task instances to kill. - :type zombies: ``SimpleTaskInstance`` + :type zombies: airflow.utils.dag_processing.SimpleTaskInstance :param session: DB session. - :type session: Session + :type session: sqlalchemy.orm.session.Session """ for zombie in zombies: if zombie.dag_id in self.dags: @@ -1059,7 +1059,7 @@ def are_dependencies_met( should be evaluated. 
:type dep_context: DepContext :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session :param verbose: whether log details on failed dependencies on info or debug log level :type verbose: bool @@ -1928,13 +1928,13 @@ class derived from this one results in the creation of a task object, failing the task :type retries: int :param retry_delay: delay between retries - :type retry_delay: timedelta + :type retry_delay: datetime.timedelta :param retry_exponential_backoff: allow progressive longer waits between retries by using exponential backoff algorithm on retry delay (delay will be converted into seconds) :type retry_exponential_backoff: bool :param max_retry_delay: maximum delay interval between retries - :type max_retry_delay: timedelta + :type max_retry_delay: datetime.timedelta :param start_date: The ``start_date`` for the task, determines the ``execution_date`` for the first task instance. The best practice is to have the start_date rounded @@ -1951,9 +1951,9 @@ class derived from this one results in the creation of a task object, ``TimeSensor`` and ``TimeDeltaSensor``. We advise against using dynamic ``start_date`` and recommend using fixed ones. Read the FAQ entry about start_date for more information. - :type start_date: datetime + :type start_date: datetime.datetime :param end_date: if specified, the scheduler won't go beyond this date - :type end_date: datetime + :type end_date: datetime.datetime :param depends_on_past: when set to true, task instances will run sequentially while relying on the previous task's schedule to succeed. The task instance for the start_date is allowed to run. @@ -1970,7 +1970,7 @@ class derived from this one results in the creation of a task object, does support targeting specific queues. 
:type queue: str :param dag: a reference to the dag the task is attached to (if any) - :type dag: DAG + :type dag: airflow.models.DAG :param priority_weight: priority weight of this task against other task. This allows the executor to trigger higher priority tasks before others when things get backed up. Set priority_weight as a higher @@ -2881,15 +2881,15 @@ def create_dagrun(self, :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date of this dag run - :type execution_date: datetime + :type execution_date: datetime.datetime :param state: the state of the dag run - :type state: State + :type state: airflow.utils.state.State :param start_date: the date this dag run should be evaluated - :type start_date: datetime + :type start_date: datetime.datetime :param external_trigger: whether this dag run is externally triggered :type external_trigger: bool :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session """ return self.get_dag().create_dagrun(run_id=run_id, @@ -2934,7 +2934,7 @@ class DAG(BaseDag, LoggingMixin): defines where jinja will look for your templates. Order matters. Note that jinja/airflow includes the path of your DAG file by default - :type template_searchpath: str or list of stings + :type template_searchpath: str or list[str] :param user_defined_macros: a dictionary of macros that will be exposed in your jinja templates. For example, passing ``dict(foo='bar')`` to this argument allows you to ``{{ foo }}`` in all jinja @@ -3963,9 +3963,9 @@ def run( Runs the DAG. 
:param start_date: the start date of the range to run - :type start_date: datetime + :type start_date: datetime.datetime :param end_date: the end date of the range to run - :type end_date: datetime + :type end_date: datetime.datetime :param mark_success: True to mark jobs as succeeded without running them :type mark_success: bool :param local: True to run the tasks using the LocalExecutor @@ -4036,15 +4036,15 @@ def create_dagrun(self, :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date of this dag run - :type execution_date: datetime + :type execution_date: datetime.datetime :param state: the state of the dag run - :type state: State + :type state: airflow.utils.state.State :param start_date: the date this dag run should be evaluated :type start_date: datetime :param external_trigger: whether this dag run is externally triggered :type external_trigger: bool :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session """ run = DagRun( dag_id=self.dag_id, @@ -4077,7 +4077,7 @@ def sync_to_db(self, owner=None, sync_time=None, session=None): SubDagOperator. :param dag: the DAG object to save to the DB - :type dag: DAG + :type dag: airflow.models.DAG :param sync_time: The time that the DAG should be marked as sync'ed :type sync_time: datetime :return: None @@ -4263,7 +4263,7 @@ def setdefault(cls, key, default, deserialize_json=False): for a key, and if it isn't there, stores the default value and returns it. 
:param key: Dict key for this Variable - :type key: String + :type key: str :param default: Default value to set and return if the variable isn't already in the DB :type default: Mixed @@ -4404,16 +4404,16 @@ def find(dag_id=None, run_id=None, execution_date=None, :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date - :type execution_date: datetime + :type execution_date: datetime.datetime :param state: the state of the dag run - :type state: State + :type state: airflow.utils.state.State :param external_trigger: whether this dag run is externally triggered :type external_trigger: bool :param no_backfills: return no backfills (True), return all (False). Defaults to False :type no_backfills: bool :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session """ DR = DagRun diff --git a/airflow/models/taskreschedule.py b/airflow/models/taskreschedule.py index 0beedee01225f..60bad78aa41a1 100755 --- a/airflow/models/taskreschedule.py +++ b/airflow/models/taskreschedule.py @@ -69,7 +69,7 @@ def find_for_task_instance(task_instance, session): in ascending order. :param task_instance: the task instance to find task reschedules for - :type task_instance: TaskInstance + :type task_instance: airflow.models.TaskInstance """ TR = TaskReschedule return ( diff --git a/airflow/operators/docker_operator.py b/airflow/operators/docker_operator.py index cf9838593fe18..1d550cec8804a 100644 --- a/airflow/operators/docker_operator.py +++ b/airflow/operators/docker_operator.py @@ -59,9 +59,9 @@ class DockerOperator(BaseOperator): https://docs.docker.com/engine/reference/run/#cpu-share-constraint :type cpus: float :param dns: Docker custom DNS servers - :type dns: list of strings + :type dns: list[str] :param dns_search: Docker custom DNS search domain - :type dns_search: list of strings + :type dns_search: list[str] :param docker_url: URL of the host running the docker daemon. 
Default is unix://var/run/docker.sock :type docker_url: str diff --git a/airflow/operators/generic_transfer.py b/airflow/operators/generic_transfer.py index 7b1a64ee753d5..61d6692e5ff6b 100644 --- a/airflow/operators/generic_transfer.py +++ b/airflow/operators/generic_transfer.py @@ -41,7 +41,7 @@ class GenericTransfer(BaseOperator): :type destination_conn_id: str :param preoperator: sql statement or list of statements to be executed prior to loading the data. (templated) - :type preoperator: str or list of str + :type preoperator: str or list[str] """ template_fields = ('sql', 'destination_table', 'preoperator') diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py index 8d5ed0361a19c..425bc54b4fa53 100644 --- a/airflow/operators/hive_to_druid.py +++ b/airflow/operators/hive_to_druid.py @@ -48,7 +48,7 @@ class HiveToDruidTransfer(BaseOperator): :type metastore_conn_id: str :param hadoop_dependency_coordinates: list of coordinates to squeeze int the ingest json - :type hadoop_dependency_coordinates: list of str + :type hadoop_dependency_coordinates: list[str] :param intervals: list of time intervals that defines segments, this is passed as is to the json object. (templated) :type intervals: list diff --git a/airflow/operators/mysql_operator.py b/airflow/operators/mysql_operator.py index 2cc9b07e3478b..905a43d64b964 100644 --- a/airflow/operators/mysql_operator.py +++ b/airflow/operators/mysql_operator.py @@ -26,10 +26,11 @@ class MySqlOperator(BaseOperator): """ Executes sql code in a specific MySQL database - :param sql: the sql code to be executed. (templated) - :type sql: Can receive a str representing a sql statement, - a list of str (sql statements), or reference to a template file. + :param sql: the sql code to be executed. Can receive a str representing a + sql statement, a list of str (sql statements), or reference to a template file. 
Template reference are recognized by str ending in '.sql' + (templated) + :type sql: str or list[str] :param mysql_conn_id: reference to a specific mysql database :type mysql_conn_id: str :param parameters: (optional) the parameters to render the SQL query with. diff --git a/airflow/operators/oracle_operator.py b/airflow/operators/oracle_operator.py index 2d1a655f9a552..5cb62ef8a3455 100644 --- a/airflow/operators/oracle_operator.py +++ b/airflow/operators/oracle_operator.py @@ -26,10 +26,11 @@ class OracleOperator(BaseOperator): """ Executes sql code in a specific Oracle database - :param sql: the sql code to be executed. (templated) - :type sql: Can receive a str representing a sql statement, + :param sql: the sql code to be executed. Can receive a str representing a sql statement, a list of str (sql statements), or reference to a template file. Template reference are recognized by str ending in '.sql' + (templated) + :type sql: str or list[str] :param oracle_conn_id: reference to a specific Oracle database :type oracle_conn_id: str :param parameters: (optional) the parameters to render the SQL query with. diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py index c03a9a106e6e0..755f39e0160e4 100644 --- a/airflow/operators/python_operator.py +++ b/airflow/operators/python_operator.py @@ -57,10 +57,10 @@ class PythonOperator(BaseOperator): will get templated by the Airflow engine sometime between ``__init__`` and ``execute`` takes place and are made available in your callable's context after the template has been applied. 
(templated) - :type templates_dict: dict of str + :type templates_dict: dict[str] :param templates_exts: a list of file extensions to resolve while processing templated fields, for examples ``['.sql', '.hql']`` - :type templates_exts: list(str) + :type templates_exts: list[str] """ template_fields = ('templates_dict',) template_ext = tuple() @@ -197,7 +197,7 @@ class PythonVirtualenvOperator(PythonOperator): defined with def, which will be run in a virtualenv :type python_callable: function :param requirements: A list of requirements as specified in a pip install command - :type requirements: list(str) + :type requirements: list[str] :param python_version: The Python version to run the virtualenv with. Note that both 2 and 2.7 are acceptable forms. :type python_version: str @@ -214,9 +214,9 @@ class PythonVirtualenvOperator(PythonOperator): :param op_kwargs: A dict of keyword arguments to pass to python_callable. :type op_kwargs: dict :param string_args: Strings that are present in the global var virtualenv_string_args, - available to python_callable at runtime as a list(str). Note that args are split + available to python_callable at runtime as a list[str]. Note that args are split by newline. 
- :type string_args: list(str) + :type string_args: list[str] :param templates_dict: a dictionary where the values are templates that will get templated by the Airflow engine sometime between ``__init__`` and ``execute`` takes place and are made available @@ -224,7 +224,7 @@ class PythonVirtualenvOperator(PythonOperator): :type templates_dict: dict of str :param templates_exts: a list of file extensions to resolve while processing templated fields, for examples ``['.sql', '.hql']`` - :type templates_exts: list(str) + :type templates_exts: list[str] """ @apply_defaults def __init__(self, python_callable, diff --git a/airflow/operators/subdag_operator.py b/airflow/operators/subdag_operator.py index 90f17fcf6aa10..bdee51548d874 100644 --- a/airflow/operators/subdag_operator.py +++ b/airflow/operators/subdag_operator.py @@ -30,12 +30,12 @@ class SubDagOperator(BaseOperator): should be prefixed by its parent and a dot. As in `parent.child`. :param subdag: the DAG object to run as a subdag of the current DAG. - :type subdag: airflow.DAG. + :type subdag: airflow.models.DAG :param dag: the parent DAG for the subdag. - :type dag: airflow.DAG. + :type dag: airflow.models.DAG :param executor: the executor for this subdag. Default to use SequentialExecutor. Please find AIRFLOW-74 for more details. - :type executor: airflow.executors. 
+ :type executor: airflow.executors.base_executor.BaseExecutor """ template_fields = tuple() diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py index 811e1b4c58122..b3966fad098d2 100644 --- a/airflow/plugins_manager.py +++ b/airflow/plugins_manager.py @@ -81,8 +81,8 @@ def load_entrypoint_plugins(entry_points, airflow_plugins): :type entry_points: Generator[setuptools.EntryPoint, None, None] :param airflow_plugins: A collection of existing airflow plugins to ensure we don't load duplicates - :type airflow_plugins: List[AirflowPlugin] - :return: List[Type[AirflowPlugin]] + :type airflow_plugins: list[type[airflow.plugins_manager.AirflowPlugin]] + :rtype: list[airflow.plugins_manager.AirflowPlugin] """ for entry_point in entry_points: log.debug('Importing entry_point plugin %s', entry_point.name) diff --git a/airflow/sensors/hdfs_sensor.py b/airflow/sensors/hdfs_sensor.py index 298ac48aecc6c..3eb5145ec9668 100644 --- a/airflow/sensors/hdfs_sensor.py +++ b/airflow/sensors/hdfs_sensor.py @@ -80,10 +80,14 @@ def filter_for_ignored_ext(result, ignored_ext, ignore_copying): """ Will filter if instructed to do so the result to remove matching criteria - :param result: (list) of dicts returned by Snakebite ls - :param ignored_ext: (list) of ignored extensions - :param ignore_copying: (bool) shall we ignore ? - :return: (list) of dicts which were not removed + :param result: list of dicts returned by Snakebite ls + :type result: list[dict] + :param ignored_ext: list of ignored extensions + :type ignored_ext: list + :param ignore_copying: shall we ignore ? 
+ :type ignore_copying: bool + :return: list of dicts which were not removed + :rtype: list[dict] """ if ignore_copying: log = LoggingMixin().log diff --git a/airflow/sensors/named_hive_partition_sensor.py b/airflow/sensors/named_hive_partition_sensor.py index 4a076a3dd6870..f436614507ac8 100644 --- a/airflow/sensors/named_hive_partition_sensor.py +++ b/airflow/sensors/named_hive_partition_sensor.py @@ -34,7 +34,7 @@ class NamedHivePartitionSensor(BaseSensorOperator): Thrift client ``get_partitions_by_name`` method. Note that you cannot use logical or comparison operators as in HivePartitionSensor. - :type partition_names: list of strings + :type partition_names: list[str] :param metastore_conn_id: reference to the metastore thrift service connection id :type metastore_conn_id: str diff --git a/airflow/settings.py b/airflow/settings.py index 59767085a2611..c4df4788bcf66 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -243,7 +243,7 @@ def configure_action_logging(): """ Any additional configuration (register callback) for airflow.utils.action_loggers module - :return: None + :rtype: None """ pass diff --git a/airflow/ti_deps/dep_context.py b/airflow/ti_deps/dep_context.py index f946cf720e5e2..96b0e773fbe3c 100644 --- a/airflow/ti_deps/dep_context.py +++ b/airflow/ti_deps/dep_context.py @@ -44,7 +44,7 @@ class DepContext(object): :param deps: The context-specific dependencies that need to be evaluated for a task instance to run in this execution context. - :type deps: set(BaseTIDep) + :type deps: set(airflow.ti_deps.deps.base_ti_dep.BaseTIDep) :param flag_upstream_failed: This is a hack to generate the upstream_failed state creation while checking to see whether the task instance is runnable. It was the shortest path to add the feature. 
This is bad since this class should be pure (no diff --git a/airflow/ti_deps/deps/base_ti_dep.py b/airflow/ti_deps/deps/base_ti_dep.py index 90c156cb760b9..1be9bd7d67650 100644 --- a/airflow/ti_deps/deps/base_ti_dep.py +++ b/airflow/ti_deps/deps/base_ti_dep.py @@ -66,9 +66,9 @@ def _get_dep_statuses(self, ti, session, dep_context=None): representing if each of the passed in task's upstream tasks succeeded or not. :param ti: the task instance to get the dependency status for - :type ti: TaskInstance + :type ti: airflow.models.TaskInstance :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session :param dep_context: the context for which this dependency should be evaluated for :type dep_context: DepContext """ @@ -81,9 +81,9 @@ def get_dep_statuses(self, ti, session, dep_context=None): checks for all dependencies. :param ti: the task instance to get the dependency status for - :type ti: TaskInstance + :type ti: airflow.models.TaskInstance :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session :param dep_context: the context for which this dependency should be evaluated for :type dep_context: DepContext """ @@ -114,9 +114,9 @@ def is_met(self, ti, session, dep_context=None): passing. :param ti: the task instance to see if this dependency is met for - :type ti: TaskInstance + :type ti: airflow.models.TaskInstance :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext @@ -130,9 +130,9 @@ def get_failure_reasons(self, ti, session, dep_context=None): Returns an iterable of strings that explain why this dependency wasn't met. 
:param ti: the task instance to see if this dependency is met for - :type ti: TaskInstance + :type ti: airflow.models.TaskInstance :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow/ti_deps/deps/trigger_rule_dep.py index f1d58d005791d..71994fc2e0946 100644 --- a/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow/ti_deps/deps/trigger_rule_dep.py @@ -103,7 +103,7 @@ def _evaluate_trigger_rule( rule was met. :param ti: the task instance to evaluate the trigger rule of - :type ti: TaskInstance + :type ti: airflow.models.TaskInstance :param successes: Number of successful upstream tasks :type successes: bool :param skipped: Number of skipped upstream tasks @@ -120,7 +120,7 @@ def _evaluate_trigger_rule( path to add the feature :type flag_upstream_failed: bool :param session: database session - :type session: Session + :type session: sqlalchemy.orm.session.Session """ TR = airflow.models.TriggerRule diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index 27546591277dd..5cc99b7c57f53 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -63,7 +63,7 @@ class SimpleDag(BaseDag): def __init__(self, dag, pickle_id=None): """ :param dag: the DAG - :type dag: DAG + :type dag: airflow.models.DAG :param pickle_id: ID associated with the pickled version of this DAG. :type pickle_id: unicode """ @@ -247,7 +247,7 @@ def __init__(self, simple_dags): Constructor. 
:param simple_dags: SimpleDag objects that should be in this - :type: list(SimpleDag) + :type simple_dags: list[airflow.utils.dag_processing.SimpleDag] """ self.simple_dags = simple_dags self.dag_id_to_simple_dag = {} @@ -269,7 +269,7 @@ def get_dag(self, dag_id): :type dag_id: unicode :return: if the given DAG ID exists in the bag, return the BaseDag corresponding to that ID. Otherwise, throw an Exception - :rtype: SimpleDag + :rtype: airflow.utils.dag_processing.SimpleDag """ if dag_id not in self.dag_id_to_simple_dag: raise AirflowException("Unknown DAG ID {}".format(dag_id)) @@ -406,7 +406,7 @@ def done(self): def result(self): """ :return: result of running SchedulerJob.process_file() - :rtype: list[SimpleDag] + :rtype: list[airflow.utils.dag_processing.SimpleDag] """ raise NotImplementedError() @@ -456,15 +456,15 @@ def __init__(self, async_mode): """ :param dag_directory: Directory where DAG definitions are kept. All - files in file_paths should be under this directory + files in file_paths should be under this directory :type dag_directory: unicode :param file_paths: list of file paths that contain DAG definitions :type file_paths: list[unicode] :param max_runs: The number of times to parse and schedule each file. -1 - for unlimited. + for unlimited. :type max_runs: int :param processor_factory: function that creates processors for DAG - definition files. Arguments are (dag_definition_path, log_file_path) + definition files. 
Arguments are (dag_definition_path, log_file_path) :type processor_factory: (unicode, unicode, list) -> (AbstractDagFileProcessor) :param async_mode: Whether to start agent in async mode :type async_mode: bool @@ -679,7 +679,7 @@ class DagFileProcessorManager(LoggingMixin): :type _file_path_queue: list[unicode] :type _processors: dict[unicode, AbstractDagFileProcessor] :type _last_runtime: dict[unicode, float] - :type _last_finish_time: dict[unicode, datetime] + :type _last_finish_time: dict[unicode, datetime.datetime] """ def __init__(self, @@ -693,18 +693,18 @@ def __init__(self, async_mode=True): """ :param dag_directory: Directory where DAG definitions are kept. All - files in file_paths should be under this directory + files in file_paths should be under this directory :type dag_directory: unicode :param file_paths: list of file paths that contain DAG definitions :type file_paths: list[unicode] :param max_runs: The number of times to parse and schedule each file. -1 - for unlimited. + for unlimited. :type max_runs: int :param processor_factory: function that creates processors for DAG - definition files. Arguments are (dag_definition_path) + definition files. Arguments are (dag_definition_path) :type processor_factory: (unicode, unicode, list) -> (AbstractDagFileProcessor) :param signal_conn: connection to communicate signal with processor agent. - :type signal_conn: Connection + :type signal_conn: airflow.models.connection.Connection :param stat_queue: the queue to use for passing back parsing stat to agent. :type stat_queue: multiprocessing.Queue :param result_queue: the queue to use for passing back the result to agent. @@ -781,7 +781,6 @@ def start(self): DAGs in parallel. By processing them in separate processes, we can get parallelism and isolation from potentially harmful user code. 
- :return: """ self.log.info("Processing files using up to {} processes at a time " @@ -915,7 +914,6 @@ def _refresh_dag_dir(self): def _print_stat(self): """ Occasionally print out stats about how fast the files are getting processed - :return: """ if ((timezone.utcnow() - self.last_stat_print_time).total_seconds() > self.print_stats_interval): @@ -927,6 +925,7 @@ def _print_stat(self): def clear_nonexistent_import_errors(self, session): """ Clears import errors for files that no longer exist. + :param session: session for ORM operations :type session: sqlalchemy.orm.session.Session """ @@ -941,6 +940,7 @@ def clear_nonexistent_import_errors(self, session): def _log_file_processing_stats(self, known_file_paths): """ Print out stats about how files are getting processed. + :param known_file_paths: a list of file paths that may contain Airflow DAG definitions :type known_file_paths: list[unicode] @@ -1008,7 +1008,7 @@ def get_pid(self, file_path): :param file_path: the path to the file that's being processed :type file_path: unicode :return: the PID of the process processing the given file or None if - the specified file is not being processed + the specified file is not being processed :rtype: int """ if file_path in self._processors: @@ -1027,8 +1027,8 @@ def get_runtime(self, file_path): :param file_path: the path to the file that's being processed :type file_path: unicode :return: the current runtime (in seconds) of the process that's - processing the specified file or None if the file is not currently - being processed + processing the specified file or None if the file is not currently + being processed """ if file_path in self._processors: return (timezone.utcnow() - self._processors[file_path].start_time)\ @@ -1040,7 +1040,7 @@ def get_last_runtime(self, file_path): :param file_path: the path to the file that was processed :type file_path: unicode :return: the runtime (in seconds) of the process of the last run, or - None if the file was never processed. 
+ None if the file was never processed. :rtype: float """ return self._last_runtime.get(file_path) @@ -1050,7 +1050,7 @@ def get_last_finish_time(self, file_path): :param file_path: the path to the file that was processed :type file_path: unicode :return: the finish time of the process of the last run, or None if the - file was never processed. + file was never processed. :rtype: datetime """ return self._last_finish_time.get(file_path) @@ -1060,7 +1060,7 @@ def get_start_time(self, file_path): :param file_path: the path to the file that's being processed :type file_path: unicode :return: the start time of the process that's processing the - specified file or None if the file is not currently being processed + specified file or None if the file is not currently being processed :rtype: datetime """ if file_path in self._processors: @@ -1110,8 +1110,8 @@ def heartbeat(self): results from the finished processors. :return: a list of SimpleDags that were produced by processors that - have finished since the last time this was called - :rtype: list[SimpleDag] + have finished since the last time this was called + :rtype: list[airflow.utils.dag_processing.SimpleDag] """ finished_processors = {} """:type : dict[unicode, AbstractDagFileProcessor]""" diff --git a/airflow/utils/log/es_task_handler.py b/airflow/utils/log/es_task_handler.py index 2dbee94171f41..94a2224b18700 100644 --- a/airflow/utils/log/es_task_handler.py +++ b/airflow/utils/log/es_task_handler.py @@ -87,7 +87,7 @@ def _read(self, ti, try_number, metadata=None): :param try_number: try_number of the task instance :param metadata: log metadata, can be used for steaming log reading and auto-tailing. - :return a list of log documents and metadata. + :return: a list of log documents and metadata. 
""" if not metadata: metadata = {'offset': 0} diff --git a/airflow/utils/log/file_processor_handler.py b/airflow/utils/log/file_processor_handler.py index cc7a8bd843ace..8b0bc978e1aa3 100644 --- a/airflow/utils/log/file_processor_handler.py +++ b/airflow/utils/log/file_processor_handler.py @@ -131,7 +131,7 @@ def _init_file(self, filename): """ Create log file and directory if required. :param filename: task instance object - :return relative log path of the given task instance + :return: relative log path of the given task instance """ relative_path = self._render_filename(filename) full_path = os.path.join(self._get_log_directory(), relative_path) diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index 113bd254ad0b2..111f26c279cf0 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -171,7 +171,7 @@ def _init_file(self, ti): """ Create log directory and give it correct permissions. :param ti: task instance object - :return relative log path of the given task instance + :return: relative log path of the given task instance """ # To handle log writing when tasks are impersonated, the log files need to # be writable by the user that runs the Airflow command and the user diff --git a/airflow/utils/operator_helpers.py b/airflow/utils/operator_helpers.py index e981941d84fea..e015f1d5ef072 100644 --- a/airflow/utils/operator_helpers.py +++ b/airflow/utils/operator_helpers.py @@ -41,7 +41,7 @@ def context_to_airflow_vars(context, in_env_var_format=False): :type context: dict :param in_env_var_format: If returned vars should be in ABC_DEF_GHI format. :type in_env_var_format: bool - :return task_instance context as dict. + :return: task_instance context as dict. 
""" params = dict() if in_env_var_format: diff --git a/airflow/www/security.py b/airflow/www/security.py index 7055228793364..2da1d70542b04 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -264,10 +264,14 @@ def has_access(self, permission, view_name, user=None): Verify whether a given user could perform certain permission (e.g can_read, can_write) on the given dag_id. - :param str permission: permission on dag_id(e.g can_read, can_edit). - :param str view_name: name of view-menu(e.g dag id is a view-menu as well). - :param str user: user name + :param permission: permission on dag_id(e.g can_read, can_edit). + :type permission: str + :param view_name: name of view-menu(e.g dag id is a view-menu as well). + :type permission: str + :param user: user name + :type permission: str :return: a bool whether user could perform certain permission on the dag_id. + :rtype bool """ if not user: user = g.user @@ -337,9 +341,10 @@ def _merge_perm(self, permission_name, view_menu_name): It will add the related entry to ab_permission and ab_view_menu two meta tables as well. - :param str permission_name: Name of the permission. - :param str view_menu_name: Name of the view-menu - + :param permission_name: Name of the permission. + :type permission_name: str + :param view_menu_name: Name of the view-menu + :type view_menu_name: str :return: """ permission = self.find_permission(permission_name) diff --git a/docs/code.rst b/docs/code.rst index 3aa926930062d..23c522ddd0f27 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -533,6 +533,8 @@ Executors are the mechanism by which task instances get run. .. autoclass:: airflow.executors.celery_executor.CeleryExecutor .. autoclass:: airflow.executors.local_executor.LocalExecutor .. autoclass:: airflow.executors.sequential_executor.SequentialExecutor +.. 
autoclass:: airflow.executors.dask_executor.DaskExecutor + Community-contributed executors ''''''''''''''''''''''''''''''' diff --git a/docs/concepts.rst b/docs/concepts.rst index e79c5b6890c62..7aea8dfe3c086 100644 --- a/docs/concepts.rst +++ b/docs/concepts.rst @@ -129,16 +129,23 @@ described elsewhere in this document. Airflow provides operators for many common tasks, including: -- ``BashOperator`` - executes a bash command -- ``PythonOperator`` - calls an arbitrary Python function -- ``EmailOperator`` - sends an email -- ``SimpleHttpOperator`` - sends an HTTP request -- ``MySqlOperator``, ``SqliteOperator``, ``PostgresOperator``, ``MsSqlOperator``, ``OracleOperator``, ``JdbcOperator``, etc. - executes a SQL command +- :class:`airflow.operators.bash_operator.BashOperator` - executes a bash command +- :class:`airflow.operators.python_operator.PythonOperator` - calls an arbitrary Python function +- :class:`airflow.operators.email_operator.EmailOperator` - sends an email +- :class:`airflow.operators.http_operator.SimpleHttpOperator` - sends an HTTP request +- :class:`airflow.operators.mysql_operator.MySqlOperator`, + :class:`airflow.operators.sqlite_operator.SqliteOperator`, + :class:`airflow.operators.postgres_operator.PostgresOperator`, + :class:`airflow.operators.mssql_operator.MsSqlOperator`, + :class:`airflow.operators.oracle_operator.OracleOperator`, + :class:`airflow.operators.jdbc_operator.JdbcOperator`, etc. - executes a SQL command - ``Sensor`` - waits for a certain time, file, database row, S3 key, etc... In addition to these basic building blocks, there are many more specific -operators: ``DockerOperator``, ``HiveOperator``, ``S3FileTransformOperator``, -``PrestoToMysqlOperator``, ``SlackOperator``... you get the idea! 
+operators: :class:`airflow.operators.docker_operator.DockerOperator`, +:class:`airflow.operators.hive_operator.HiveOperator`, :class:`airflow.operators.s3_file_transform_operator.S3FileTransformOperator`, +:class:`airflow.operators.presto_to_mysql.PrestoToMySqlTransfer`, +:class:`airflow.operators.slack_operator.SlackAPIOperator`... you get the idea! The ``airflow/contrib/`` directory contains yet more operators built by the community. These operators aren't always as complete or well-tested as those in diff --git a/docs/conf.py b/docs/conf.py index dbd5393f05a4d..cdf41b10f0fa4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -52,6 +52,7 @@ 'cloudant', 'cx_Oracle', 'datadog', + 'distributed', 'docker', 'google', 'google_auth_httplib2', @@ -185,7 +186,14 @@ intersphinx_mapping = { - 'boto3': ('https://boto3.amazonaws.com/v1/documentation/api/latest', None), + 'boto3': ('https://boto3.amazonaws.com/v1/documentation/api/latest/', None), + 'google-cloud-python': ( + 'https://googleapis.github.io/google-cloud-python/latest/', None), + 'mongodb': ('https://api.mongodb.com/python/current/', None), + 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None), + 'python': ('https://docs.python.org/3/', None), + 'requests': ('http://docs.python-requests.org/en/master/', None), + 'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest/', None), } # -- Options for HTML output ---------------------------------------------- diff --git a/docs/howto/executor/use-dask.rst b/docs/howto/executor/use-dask.rst index 6d3efcb349a76..08e923af022a8 100644 --- a/docs/howto/executor/use-dask.rst +++ b/docs/howto/executor/use-dask.rst @@ -18,7 +18,7 @@ Scaling Out with Dask ===================== -``DaskExecutor`` allows you to run Airflow tasks in a Dask Distributed cluster. +:class:`airflow.executors.dask_executor.DaskExecutor` allows you to run Airflow tasks in a Dask Distributed cluster. Dask clusters can be run on a single machine or on remote networks. 
For complete details, consult the `Distributed documentation `_. @@ -39,7 +39,7 @@ Next start at least one Worker on any machine that can connect to the host: dask-worker $DASK_HOST:$DASK_PORT -Edit your ``airflow.cfg`` to set your executor to ``DaskExecutor`` and provide +Edit your ``airflow.cfg`` to set your executor to :class:`airflow.executors.dask_executor.DaskExecutor` and provide the Dask Scheduler address in the ``[dask]`` section. Please note: diff --git a/docs/howto/executor/use-mesos.rst b/docs/howto/executor/use-mesos.rst index d67579ddcb96b..1cbe268298c93 100644 --- a/docs/howto/executor/use-mesos.rst +++ b/docs/howto/executor/use-mesos.rst @@ -26,7 +26,7 @@ There are two ways you can run airflow as a mesos framework: Tasks executed directly on mesos slaves --------------------------------------- -``MesosExecutor`` allows you to schedule airflow tasks on a Mesos cluster. +:class:`airflow.contrib.executors.mesos_executor.MesosExecutor` allows you to schedule airflow tasks on a Mesos cluster. For this to work, you need a running mesos cluster and you must perform the following steps - @@ -57,7 +57,7 @@ You can now see the airflow framework and corresponding tasks in mesos UI. The logs for airflow tasks can be seen in airflow UI as usual. For more information about mesos, refer to `mesos documentation `_. -For any queries/bugs on `MesosExecutor`, please contact `@kapil-malik `_. +For any queries/bugs on :class:`airflow.contrib.executors.mesos_executor.MesosExecutor`, please contact `@kapil-malik `_. Tasks executed in containers on mesos slaves -------------------------------------------- diff --git a/docs/installation.rst b/docs/installation.rst index 2c9436e2f997b..d9431d54bdd9a 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -47,77 +47,77 @@ these extra dependencies. 
Here's the list of the subpackages and what they enable: -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| subpackage | install command | enables | -+=====================+===================================================+=================================================+ -| all | ``pip install apache-airflow[all]`` | All Airflow features known to man | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| all_dbs | ``pip install apache-airflow[all_dbs]`` | All databases integrations | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| async | ``pip install apache-airflow[async]`` | Async worker classes for Gunicorn | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| celery | ``pip install apache-airflow[celery]`` | CeleryExecutor | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| cloudant | ``pip install apache-airflow[cloudant]`` | Cloudant hook | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| crypto | ``pip install apache-airflow[crypto]`` | Encrypt connection passwords in metadata db | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| devel | ``pip install apache-airflow[devel]`` | Minimum dev tools requirements | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| devel_hadoop | ``pip install apache-airflow[devel_hadoop]`` | Airflow + dependencies on the Hadoop stack | 
-+---------------------+---------------------------------------------------+-------------------------------------------------+ -| druid | ``pip install apache-airflow[druid]`` | Druid related operators & hooks | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| gcp_api | ``pip install apache-airflow[gcp_api]`` | Google Cloud Platform hooks and operators | -| | | (using ``google-api-python-client``) | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| github_enterprise | ``pip install apache-airflow[github_enterprise]`` | Github Enterprise auth backend | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| google_auth | ``pip install apache-airflow[google_auth]`` | Google auth backend | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| hdfs | ``pip install apache-airflow[hdfs]`` | HDFS hooks and operators | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| hive | ``pip install apache-airflow[hive]`` | All Hive related operators | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| jdbc | ``pip install apache-airflow[jdbc]`` | JDBC hooks and operators | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| kerberos | ``pip install apache-airflow[kerberos]`` | Kerberos integration for Kerberized Hadoop | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| kubernetes | ``pip install apache-airflow[kubernetes]`` | Kubernetes Executor and operator | 
-+---------------------+---------------------------------------------------+-------------------------------------------------+ -| ldap | ``pip install apache-airflow[ldap]`` | LDAP authentication for users | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| mssql | ``pip install apache-airflow[mssql]`` | Microsoft SQL Server operators and hook, | -| | | support as an Airflow backend | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| mysql | ``pip install apache-airflow[mysql]`` | MySQL operators and hook, support as an Airflow | -| | | backend. The version of MySQL server has to be | -| | | 5.6.4+. The exact version upper bound depends | -| | | on version of ``mysqlclient`` package. For | -| | | example, ``mysqlclient`` 1.3.12 can only be | -| | | used with MySQL server 5.6.4 through 5.7. | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| password | ``pip install apache-airflow[password]`` | Password authentication for users | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| postgres | ``pip install apache-airflow[postgres]`` | PostgreSQL operators and hook, support as an | -| | | Airflow backend | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| qds | ``pip install apache-airflow[qds]`` | Enable QDS (Qubole Data Service) support | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| rabbitmq | ``pip install apache-airflow[rabbitmq]`` | RabbitMQ support as a Celery backend | 
-+---------------------+---------------------------------------------------+-------------------------------------------------+ -| redis | ``pip install apache-airflow[redis]`` | Redis hooks and sensors | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| s3 | ``pip install apache-airflow[s3]`` | ``S3KeySensor``, ``S3PrefixSensor`` | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| samba | ``pip install apache-airflow[samba]`` | ``Hive2SambaOperator`` | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| slack | ``pip install apache-airflow[slack]`` | ``SlackAPIPostOperator`` | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| ssh | ``pip install apache-airflow[ssh]`` | SSH hooks and Operator | -+---------------------+---------------------------------------------------+-------------------------------------------------+ -| vertica | ``pip install apache-airflow[vertica]`` | Vertica hook support as an Airflow backend | -+---------------------+---------------------------------------------------+-------------------------------------------------+ ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| subpackage | install command | enables | ++=====================+===================================================+======================================================================+ +| all | ``pip install apache-airflow[all]`` | All Airflow features known to man | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| all_dbs | ``pip install apache-airflow[all_dbs]`` 
| All databases integrations | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| async | ``pip install apache-airflow[async]`` | Async worker classes for Gunicorn | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| celery | ``pip install apache-airflow[celery]`` | CeleryExecutor | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| cloudant | ``pip install apache-airflow[cloudant]`` | Cloudant hook | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| crypto | ``pip install apache-airflow[crypto]`` | Encrypt connection passwords in metadata db | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| devel | ``pip install apache-airflow[devel]`` | Minimum dev tools requirements | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| devel_hadoop | ``pip install apache-airflow[devel_hadoop]`` | Airflow + dependencies on the Hadoop stack | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| druid | ``pip install apache-airflow[druid]`` | Druid related operators & hooks | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| gcp_api | ``pip install apache-airflow[gcp_api]`` | Google Cloud Platform hooks and operators | +| | | (using ``google-api-python-client``) | 
++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| github_enterprise | ``pip install apache-airflow[github_enterprise]`` | Github Enterprise auth backend | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| google_auth | ``pip install apache-airflow[google_auth]`` | Google auth backend | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| hdfs | ``pip install apache-airflow[hdfs]`` | HDFS hooks and operators | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| hive | ``pip install apache-airflow[hive]`` | All Hive related operators | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| jdbc | ``pip install apache-airflow[jdbc]`` | JDBC hooks and operators | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| kerberos | ``pip install apache-airflow[kerberos]`` | Kerberos integration for Kerberized Hadoop | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| kubernetes | ``pip install apache-airflow[kubernetes]`` | Kubernetes Executor and operator | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| ldap | ``pip install apache-airflow[ldap]`` | LDAP authentication for users | 
++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| mssql | ``pip install apache-airflow[mssql]`` | Microsoft SQL Server operators and hook, | +| | | support as an Airflow backend | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| mysql | ``pip install apache-airflow[mysql]`` | MySQL operators and hook, support as an Airflow | +| | | backend. The version of MySQL server has to be | +| | | 5.6.4+. The exact version upper bound depends | +| | | on version of ``mysqlclient`` package. For | +| | | example, ``mysqlclient`` 1.3.12 can only be | +| | | used with MySQL server 5.6.4 through 5.7. | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| password | ``pip install apache-airflow[password]`` | Password authentication for users | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| postgres | ``pip install apache-airflow[postgres]`` | PostgreSQL operators and hook, support as an | +| | | Airflow backend | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| qds | ``pip install apache-airflow[qds]`` | Enable QDS (Qubole Data Service) support | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| rabbitmq | ``pip install apache-airflow[rabbitmq]`` | RabbitMQ support as a Celery backend | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| redis | ``pip install apache-airflow[redis]`` | 
Redis hooks and sensors | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| s3 | ``pip install apache-airflow[s3]`` | ``S3KeySensor``, ``S3PrefixSensor`` | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| samba | ``pip install apache-airflow[samba]`` | :class:`airflow.operators.hive_to_samba_operator.Hive2SambaOperator` | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| slack | ``pip install apache-airflow[slack]`` | :class:`airflow.operators.slack_operator.SlackAPIOperator` | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| ssh | ``pip install apache-airflow[ssh]`` | SSH hooks and Operator | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| vertica | ``pip install apache-airflow[vertica]`` | Vertica hook support as an Airflow backend | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ Initiating Airflow Database ''''''''''''''''''''''''''' diff --git a/docs/scheduler.rst b/docs/scheduler.rst index 4b8fb2c38d636..67713216142dd 100644 --- a/docs/scheduler.rst +++ b/docs/scheduler.rst @@ -39,9 +39,9 @@ has ended. start date, at the END of the period. The scheduler starts an instance of the executor specified in the your -``airflow.cfg``. If it happens to be the ``LocalExecutor``, tasks will be -executed as subprocesses; in the case of ``CeleryExecutor``, ``DaskExecutor``, and -``MesosExecutor``, tasks are executed remotely. +``airflow.cfg``. 
If it happens to be the :class:`airflow.executors.local_executor.LocalExecutor`, tasks will be +executed as subprocesses; in the case of :class:`airflow.executors.celery_executor.CeleryExecutor`, :class:`airflow.executors.dask_executor.DaskExecutor`, and +:class:`airflow.contrib.executors.mesos_executor.MesosExecutor`, tasks are executed remotely. To start a scheduler, simply run the command: diff --git a/docs/start.rst b/docs/start.rst index c998c1306ffab..4c728f95f2420 100644 --- a/docs/start.rst +++ b/docs/start.rst @@ -50,7 +50,7 @@ if started by systemd. Out of the box, Airflow uses a sqlite database, which you should outgrow fairly quickly since no parallelization is possible using this database -backend. It works in conjunction with the ``SequentialExecutor`` which will +backend. It works in conjunction with the :class:`airflow.executors.sequential_executor.SequentialExecutor` which will only run task instances sequentially. While this is very limiting, it allows you to get up and running quickly and take a tour of the UI and the command line utilities. 
From e60710657b134aeefe34c3e8d0a06107764cb8ff Mon Sep 17 00:00:00 2001 From: Chris Nordqvist Date: Thu, 7 Feb 2019 16:15:33 +0100 Subject: [PATCH 0058/1104] [AIRFLOW-XXX] Correct typo for `prev_ds` (#4664) `@weekly` probably shouldn't yield the date from 2 years + 1 week ago :) --- docs/code.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/code.rst b/docs/code.rst index 23c522ddd0f27..5ccc2f438fab1 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -334,7 +334,7 @@ Variable Description ``{{ ds_nodash }}`` the execution date as ``YYYYMMDD`` ``{{ prev_ds }}`` the previous execution date as ``YYYY-MM-DD`` if ``{{ ds }}`` is ``2018-01-08`` and ``schedule_interval`` is ``@weekly``, - ``{{ prev_ds }}`` will be ``2016-01-01`` + ``{{ prev_ds }}`` will be ``2018-01-01`` ``{{ prev_ds_nodash }}`` the previous execution date as ``YYYYMMDD`` if exists, else ``None`` ``{{ next_ds }}`` the next execution date as ``YYYY-MM-DD`` if ``{{ ds }}`` is ``2018-01-01`` and ``schedule_interval`` is ``@weekly``, From f2fe587f59a082bde0e6059bc708a99cd4d5d442 Mon Sep 17 00:00:00 2001 From: Mathew Wicks Date: Fri, 8 Feb 2019 22:54:11 +1300 Subject: [PATCH 0059/1104] [AIRFLOW-3547] Fixed Jinja templating in SparkSubmitOperator (#4347) * [AIRFLOW-3547] Fixed Jinja templating in SparkSubmitOperator This is a minor change to allow Jinja templating in parameters where it makes sense for SparkSubmitOperator. * [AIRFLOW-3547] Fixed Jinja templating in SparkSubmitOperator This is a minor change to allow Jinja templating in parameters where it makes sense for SparkSubmitOperator. 
--- .../operators/spark_submit_operator.py | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/airflow/contrib/operators/spark_submit_operator.py b/airflow/contrib/operators/spark_submit_operator.py index 2803850230d4d..93dc08f1f77e0 100644 --- a/airflow/contrib/operators/spark_submit_operator.py +++ b/airflow/contrib/operators/spark_submit_operator.py @@ -29,31 +29,30 @@ class SparkSubmitOperator(BaseOperator): It requires that the "spark-submit" binary is in the PATH or the spark-home is set in the extra on the connection. - :param application: The application that submitted as a job, either jar or - py file. (templated) + :param application: The application that submitted as a job, either jar or py file. (templated) :type application: str - :param conf: Arbitrary Spark configuration properties + :param conf: Arbitrary Spark configuration properties (templated) :type conf: dict :param conn_id: The connection id as configured in Airflow administration. When an invalid connection_id is supplied, it will default to yarn. :type conn_id: str :param files: Upload additional files to the executor running the job, separated by a comma. Files will be placed in the working directory of each executor. - For example, serialized objects. + For example, serialized objects. (templated) :type files: str - :param py_files: Additional python files used by the job, can be .zip, .egg or .py. + :param py_files: Additional python files used by the job, can be .zip, .egg or .py. (templated) :type py_files: str - :param jars: Submit additional jars to upload and place them in executor classpath. - :param driver_classpath: Additional, driver-specific, classpath settings. - :type driver_classpath: str + :param jars: Submit additional jars to upload and place them in executor classpath. (templated) :type jars: str + :param driver_classpath: Additional, driver-specific, classpath settings. 
(templated) + :type driver_classpath: str :param java_class: the main class of the Java application :type java_class: str :param packages: Comma-separated list of maven coordinates of jars to include on the driver and executor classpaths. (templated) :type packages: str :param exclude_packages: Comma-separated list of maven coordinates of jars to exclude - while resolving the dependencies provided in 'packages' + while resolving the dependencies provided in 'packages' (templated) :type exclude_packages: str :param repositories: Comma-separated list of additional remote repositories to search for the maven coordinates given with 'packages' @@ -61,25 +60,23 @@ class SparkSubmitOperator(BaseOperator): :param total_executor_cores: (Standalone & Mesos only) Total cores for all executors (Default: all the available cores on the worker) :type total_executor_cores: int - :param executor_cores: (Standalone & YARN only) Number of cores per executor - (Default: 2) + :param executor_cores: (Standalone & YARN only) Number of cores per executor (Default: 2) :type executor_cores: int :param executor_memory: Memory per executor (e.g. 1000M, 2G) (Default: 1G) :type executor_memory: str :param driver_memory: Memory allocated to the driver (e.g. 1000M, 2G) (Default: 1G) :type driver_memory: str - :param keytab: Full path to the file that contains the keytab + :param keytab: Full path to the file that contains the keytab (templated) :type keytab: str - :param principal: The name of the kerberos principal used for keytab + :param principal: The name of the kerberos principal used for keytab (templated) :type principal: str :param name: Name of the job (default airflow-spark). 
(templated) :type name: str :param num_executors: Number of executors to launch :type num_executors: int - :param application_args: Arguments for the application being submitted + :param application_args: Arguments for the application being submitted (templated) :type application_args: list - :param env_vars: Environment variables for spark-submit. It - supports yarn and k8s mode too. + :param env_vars: Environment variables for spark-submit. It supports yarn and k8s mode too. (templated) :type env_vars: dict :param verbose: Whether to pass the verbose flag to spark-submit process for debugging :type verbose: bool @@ -87,7 +84,9 @@ class SparkSubmitOperator(BaseOperator): Some distros may use spark2-submit. :type spark_binary: string """ - template_fields = ('_name', '_application_args', '_packages') + template_fields = ('_application', '_conf', '_files', '_py_files', '_jars', '_driver_classpath', + '_packages', '_exclude_packages', '_keytab', '_principal', '_name', + '_application_args', '_env_vars') ui_color = WEB_COLORS['LIGHTORANGE'] @apply_defaults From dbeb6a8b0f611183b3ff525f4f22b953905961f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Fri, 8 Feb 2019 11:22:48 +0100 Subject: [PATCH 0060/1104] [AIRFLOW-XXX] Add backreference in docs between operator and integration (#4671) --- .../operators/gcp_bigtable_operator.py | 24 ++++++++ .../contrib/operators/gcp_compute_operator.py | 20 +++++++ .../operators/gcp_function_operator.py | 8 +++ .../contrib/operators/gcp_spanner_operator.py | 20 +++++++ airflow/contrib/operators/gcp_sql_operator.py | 36 ++++++++++++ airflow/contrib/operators/gcs_acl_operator.py | 8 +++ airflow/contrib/operators/gcs_to_bq.py | 4 ++ airflow/operators/bash_operator.py | 4 ++ airflow/operators/python_operator.py | 4 ++ docs/howto/operator.rst | 57 ++++++++++++++++++- 10 files changed, 183 insertions(+), 2 deletions(-) diff --git a/airflow/contrib/operators/gcp_bigtable_operator.py 
b/airflow/contrib/operators/gcp_bigtable_operator.py index e100af12ad748..48fd632de024d 100644 --- a/airflow/contrib/operators/gcp_bigtable_operator.py +++ b/airflow/contrib/operators/gcp_bigtable_operator.py @@ -51,6 +51,10 @@ class BigtableInstanceCreateOperator(BaseOperator, BigtableValidationMixin): For more details about instance creation have a look at the reference: https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.create + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:BigtableInstanceCreateOperator` + :type instance_id: str :param instance_id: The ID of the Cloud Bigtable instance to create. :type main_cluster_id: str @@ -157,6 +161,10 @@ class BigtableInstanceDeleteOperator(BaseOperator, BigtableValidationMixin): For more details about deleting instance have a look at the reference: https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.delete + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:BigtableInstanceDeleteOperator` + :type instance_id: str :param instance_id: The ID of the Cloud Bigtable instance to delete. :param project_id: Optional, the ID of the GCP project. If set to None or missing, @@ -199,6 +207,10 @@ class BigtableTableCreateOperator(BaseOperator, BigtableValidationMixin): For more details about creating table have a look at the reference: https://googleapis.github.io/google-cloud-python/latest/bigtable/table.html#google.cloud.bigtable.table.Table.create + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:BigtableTableCreateOperator` + :type instance_id: str :param instance_id: The ID of the Cloud Bigtable instance that will hold the new table. 
@@ -290,6 +302,10 @@ class BigtableTableDeleteOperator(BaseOperator, BigtableValidationMixin): For more details about deleting table have a look at the reference: https://googleapis.github.io/google-cloud-python/latest/bigtable/table.html#google.cloud.bigtable.table.Table.delete + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:BigtableTableDeleteOperator` + :type instance_id: str :param instance_id: The ID of the Cloud Bigtable instance. :type table_id: str @@ -348,6 +364,10 @@ class BigtableClusterUpdateOperator(BaseOperator, BigtableValidationMixin): have a look at the reference: https://googleapis.github.io/google-cloud-python/latest/bigtable/cluster.html#google.cloud.bigtable.cluster.Cluster.update + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:BigtableClusterUpdateOperator` + :type instance_id: str :param instance_id: The ID of the Cloud Bigtable instance. :type cluster_id: str @@ -405,6 +425,10 @@ class BigtableTableWaitForReplicationSensor(BaseSensorOperator, BigtableValidati For more details about cluster states for a table, have a look at the reference: https://googleapis.github.io/google-cloud-python/latest/bigtable/table.html#google.cloud.bigtable.table.Table.get_cluster_states + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:BigtableTableWaitForReplicationSensor` + :type instance_id: str :param instance_id: The ID of the Cloud Bigtable instance. :type table_id: str diff --git a/airflow/contrib/operators/gcp_compute_operator.py b/airflow/contrib/operators/gcp_compute_operator.py index 0645ce6ce856f..451692ac30f8d 100644 --- a/airflow/contrib/operators/gcp_compute_operator.py +++ b/airflow/contrib/operators/gcp_compute_operator.py @@ -67,6 +67,10 @@ class GceInstanceStartOperator(GceBaseOperator): """ Starts an instance in Google Compute Engine. + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GceInstanceStartOperator` + :param zone: Google Cloud Platform zone where the instance exists. :type zone: str :param resource_id: Name of the Compute Engine instance resource. @@ -110,6 +114,10 @@ class GceInstanceStopOperator(GceBaseOperator): """ Stops an instance in Google Compute Engine. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GceInstanceStopOperator` + :param zone: Google Cloud Platform zone where the instance exists. :type zone: str :param resource_id: Name of the Compute Engine instance resource. @@ -159,6 +167,10 @@ class GceSetMachineTypeOperator(GceBaseOperator): Changes the machine type for a stopped instance to the machine type specified in the request. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GceSetMachineTypeOperator` + :param zone: Google Cloud Platform zone where the instance exists. :type zone: str :param resource_id: Name of the Compute Engine instance resource. @@ -267,6 +279,10 @@ class GceInstanceTemplateCopyOperator(GceBaseOperator): """ Copies the instance template, applying specified changes. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GceInstanceTemplateCopyOperator` + :param resource_id: Name of the Instance Template :type resource_id: str :param body_patch: Patch to the body of instanceTemplates object following rfc7386 @@ -371,6 +387,10 @@ class GceInstanceGroupManagerUpdateTemplateOperator(GceBaseOperator): destination one. API V1 does not have update/patch operations for Instance Group Manager, so you must use beta or newer API version. Beta is the default. + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GceInstanceGroupManagerUpdateTemplateOperator` + :param resource_id: Name of the Instance Group Manager :type resource_id: str :param zone: Google Cloud Platform zone where the Instance Group Manager exists. diff --git a/airflow/contrib/operators/gcp_function_operator.py b/airflow/contrib/operators/gcp_function_operator.py index 420fd12bff0d7..5fc3b82794b80 100644 --- a/airflow/contrib/operators/gcp_function_operator.py +++ b/airflow/contrib/operators/gcp_function_operator.py @@ -83,6 +83,10 @@ class GcfFunctionDeployOperator(BaseOperator): Creates a function in Google Cloud Functions. If a function with this name already exists, it will be updated. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GcfFunctionDeployOperator` + :param location: Google Cloud Platform region where the function should be created. :type location: str :param body: Body of the Cloud Functions definition. The body must be a @@ -274,6 +278,10 @@ class GcfFunctionDeleteOperator(BaseOperator): """ Deletes the specified function from Google Cloud Functions. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GcfFunctionDeleteOperator` + :param name: A fully-qualified function name, matching the pattern: `^projects/[^/]+/locations/[^/]+/functions/[^/]+$` :type name: str diff --git a/airflow/contrib/operators/gcp_spanner_operator.py b/airflow/contrib/operators/gcp_spanner_operator.py index f78fcbde3211c..bea4f676e3ccc 100644 --- a/airflow/contrib/operators/gcp_spanner_operator.py +++ b/airflow/contrib/operators/gcp_spanner_operator.py @@ -98,6 +98,10 @@ class CloudSpannerInstanceDeleteOperator(BaseOperator): Deletes a Cloud Spanner instance. If an instance does not exist, no action is taken and the operator succeeds. + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSpannerInstanceDeleteOperator` + :param instance_id: The Cloud Spanner instance ID. :type instance_id: str :param project_id: Optional, the ID of the project that owns the Cloud Spanner @@ -144,6 +148,10 @@ class CloudSpannerInstanceDatabaseQueryOperator(BaseOperator): """ Executes an arbitrary DML query (INSERT, UPDATE, DELETE). + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSpannerInstanceDatabaseQueryOperator` + :param instance_id: The Cloud Spanner instance ID. :type instance_id: str :param database_id: The Cloud Spanner database ID. @@ -216,6 +224,10 @@ class CloudSpannerInstanceDatabaseDeployOperator(BaseOperator): Creates a new Cloud Spanner database, or if database exists, the operator does nothing. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSpannerInstanceDatabaseDeployOperator` + :param instance_id: The Cloud Spanner instance ID. :type instance_id: str :param database_id: The Cloud Spanner database ID. @@ -283,6 +295,10 @@ class CloudSpannerInstanceDatabaseUpdateOperator(BaseOperator): """ Updates a Cloud Spanner database with the specified DDL statement. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSpannerInstanceDatabaseUpdateOperator` + :param instance_id: The Cloud Spanner instance ID. :type instance_id: str :param database_id: The Cloud Spanner database ID. @@ -357,6 +373,10 @@ class CloudSpannerInstanceDatabaseDeleteOperator(BaseOperator): """ Deletes a Cloud Spanner database. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSpannerInstanceDatabaseDeleteOperator` + :param instance_id: Cloud Spanner instance ID. 
:type instance_id: str :param database_id: Cloud Spanner database ID. diff --git a/airflow/contrib/operators/gcp_sql_operator.py b/airflow/contrib/operators/gcp_sql_operator.py index ece91cf40456f..cdce4baa7591b 100644 --- a/airflow/contrib/operators/gcp_sql_operator.py +++ b/airflow/contrib/operators/gcp_sql_operator.py @@ -205,6 +205,10 @@ class CloudSqlInstanceCreateOperator(CloudSqlBaseOperator): If an instance with the same name exists, no action will be taken and the operator will succeed. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstanceCreateOperator` + :param body: Body required by the Cloud SQL insert API, as described in https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances/insert #request-body @@ -278,6 +282,10 @@ class CloudSqlInstancePatchOperator(CloudSqlBaseOperator): to the rules of patch semantics. https://cloud.google.com/sql/docs/mysql/admin-api/how-tos/performance#patch + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstancePatchOperator` + :param body: Body required by the Cloud SQL patch API, as described in https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances/patch#request-body :type body: dict @@ -329,6 +337,10 @@ class CloudSqlInstanceDeleteOperator(CloudSqlBaseOperator): """ Deletes a Cloud SQL instance. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstanceDeleteOperator` + :param instance: Cloud SQL instance ID. This does not include the project ID. :type instance: str :param project_id: Optional, Google Cloud Platform Project ID. If set to None or missing, @@ -369,6 +381,10 @@ class CloudSqlInstanceDatabaseCreateOperator(CloudSqlBaseOperator): """ Creates a new database inside a Cloud SQL instance. + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstanceDatabaseCreateOperator` + :param instance: Database instance ID. This does not include the project ID. :type instance: str :param body: The request body, as described in @@ -438,6 +454,10 @@ class CloudSqlInstanceDatabasePatchOperator(CloudSqlBaseOperator): instance using patch semantics. See: https://cloud.google.com/sql/docs/mysql/admin-api/how-tos/performance#patch + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstanceDatabasePatchOperator` + :param instance: Database instance ID. This does not include the project ID. :type instance: str :param database: Name of the database to be updated in the instance. @@ -507,6 +527,10 @@ class CloudSqlInstanceDatabaseDeleteOperator(CloudSqlBaseOperator): """ Deletes a database from a Cloud SQL instance. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstanceDatabaseDeleteOperator` + :param instance: Database instance ID. This does not include the project ID. :type instance: str :param database: Name of the database to be deleted in the instance. @@ -563,6 +587,10 @@ class CloudSqlInstanceExportOperator(CloudSqlBaseOperator): Note: This operator is idempotent. If executed multiple times with the same export file URI, the export file in GCS will simply be overridden. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstanceImportOperator` + :param instance: Cloud SQL instance ID. This does not include the project ID. :type instance: str :param body: The request body, as described in @@ -635,6 +663,10 @@ class CloudSqlInstanceImportOperator(CloudSqlBaseOperator): If the import file was generated in a different way, idempotence is not guaranteed. 
It has to be ensured on the SQL file level. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlInstanceImportOperator` + :param instance: Cloud SQL instance ID. This does not include the project ID. :type instance: str :param body: The request body, as described in @@ -692,6 +724,10 @@ class CloudSqlQueryOperator(BaseOperator): Performs DML or DDL query on an existing Cloud Sql instance. It optionally uses cloud-sql-proxy to establish secure connection with the database. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudSqlQueryOperator` + :param sql: SQL query or list of queries to run (should be DML or DDL query - this operator does not return any data from the database, so it is useless to pass it DQL queries. Note that it is responsibility of the diff --git a/airflow/contrib/operators/gcs_acl_operator.py b/airflow/contrib/operators/gcs_acl_operator.py index a39b8cf5d9616..03c72ce328c93 100644 --- a/airflow/contrib/operators/gcs_acl_operator.py +++ b/airflow/contrib/operators/gcs_acl_operator.py @@ -26,6 +26,10 @@ class GoogleCloudStorageBucketCreateAclEntryOperator(BaseOperator): """ Creates a new ACL entry on the specified bucket. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GoogleCloudStorageBucketCreateAclEntryOperator` + :param bucket: Name of a bucket. :type bucket: str :param entity: The entity holding the permission, in one of the following forms: @@ -69,6 +73,10 @@ class GoogleCloudStorageObjectCreateAclEntryOperator(BaseOperator): """ Creates a new ACL entry on the specified object. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GoogleCloudStorageObjectCreateAclEntryOperator` + :param bucket: Name of a bucket. :type bucket: str :param object_name: Name of the object. 
For information about how to URL encode object diff --git a/airflow/contrib/operators/gcs_to_bq.py b/airflow/contrib/operators/gcs_to_bq.py index c5337ce34cb8a..c26b2fa1c7142 100644 --- a/airflow/contrib/operators/gcs_to_bq.py +++ b/airflow/contrib/operators/gcs_to_bq.py @@ -34,6 +34,10 @@ class GoogleCloudStorageToBigQueryOperator(BaseOperator): point the operator to a Google cloud storage object name. The object in Google cloud storage must be a JSON file with the schema fields in it. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GoogleCloudStorageToBigQueryOperator` + :param bucket: The bucket to load from. (templated) :type bucket: str :param source_objects: List of Google cloud storage URIs to load from. (templated) diff --git a/airflow/operators/bash_operator.py b/airflow/operators/bash_operator.py index a2217adf4001c..58fb60497b77e 100644 --- a/airflow/operators/bash_operator.py +++ b/airflow/operators/bash_operator.py @@ -35,6 +35,10 @@ class BashOperator(BaseOperator): """ Execute a Bash script, command or set of commands. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:BashOperator` + :param bash_command: The command, set of commands or reference to a bash script (must be '.sh') to be executed. (templated) :type bash_command: str diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py index 755f39e0160e4..52ad0db7c2de8 100644 --- a/airflow/operators/python_operator.py +++ b/airflow/operators/python_operator.py @@ -39,6 +39,10 @@ class PythonOperator(BaseOperator): """ Executes a Python callable + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:PythonOperator` + :param python_callable: A reference to an object that is callable :type python_callable: python callable :param op_kwargs: a dictionary of keyword arguments that will get unpacked diff --git a/docs/howto/operator.rst b/docs/howto/operator.rst index 686afd1b05907..621215c92f4d9 100644 --- a/docs/howto/operator.rst +++ b/docs/howto/operator.rst @@ -27,6 +27,8 @@ information. .. contents:: :local: +.. _howto/operator:BashOperator: + BashOperator ------------ @@ -71,6 +73,8 @@ template to it, which will fail. bash_command="/home/batcher/test.sh ", dag=dag) +.. _howto/operator:PythonOperator: + PythonOperator -------------- @@ -106,6 +110,8 @@ is evaluated as a :ref:`Jinja template `. Google Cloud Storage Operators ------------------------------ +.. _howto/operator:GoogleCloudStorageToBigQueryOperator: + GoogleCloudStorageToBigQueryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -123,6 +129,8 @@ to execute a BigQuery load job. Google Compute Engine Operators ------------------------------- +.. _howto/operator:GceInstanceStartOperator: + GceInstanceStartOperator ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -177,6 +185,7 @@ More information See `Google Compute Engine API documentation for start `_. +.. _howto/operator:GceInstanceStopOperator: GceInstanceStopOperator ^^^^^^^^^^^^^^^^^^^^^^^ @@ -231,6 +240,7 @@ More information See `Google Compute Engine API documentation for stop `_. +.. _howto/operator:GceSetMachineTypeOperator: GceSetMachineTypeOperator ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -291,6 +301,7 @@ More information See `Google Compute Engine API documentation for setMachineType `_. +.. _howto/operator:GceInstanceTemplateCopyOperator: GceInstanceTemplateCopyOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -351,6 +362,8 @@ More information See `Google Compute Engine API documentation for instanceTemplates `_. +.. 
_howto/operator:GceInstanceGroupManagerUpdateTemplateOperator: + GceInstanceGroupManagerUpdateTemplateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -431,6 +444,7 @@ All examples below rely on the following variables, which can be passed via envi :start-after: [START howto_operator_gcp_bigtable_args] :end-before: [END howto_operator_gcp_bigtable_args] +.. _howto/operator:BigtableInstanceCreateOperator: BigtableInstanceCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -453,6 +467,8 @@ it will be retrieved from the GCP connection used. Both variants are shown: :start-after: [START howto_operator_gcp_bigtable_instance_create] :end-before: [END howto_operator_gcp_bigtable_instance_create] +.. _howto/operator:BigtableInstanceDeleteOperator: + BigtableInstanceDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -471,6 +487,8 @@ it will be retrieved from the GCP connection used. Both variants are shown: :start-after: [START howto_operator_gcp_bigtable_instance_delete] :end-before: [END howto_operator_gcp_bigtable_instance_delete] +.. _howto/operator:BigtableClusterUpdateOperator: + BigtableClusterUpdateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -489,6 +507,8 @@ it will be retrieved from the GCP connection used. Both variants are shown: :start-after: [START howto_operator_gcp_bigtable_cluster_update] :end-before: [END howto_operator_gcp_bigtable_cluster_update] +.. _howto/operator:BigtableTableCreateOperator: + BigtableTableCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -519,6 +539,7 @@ Please refer to the Python Client for Google Cloud Bigtable documentation `for Table `_ and `for Column Families `_. +.. _howto/operator:BigtableTableDeleteOperator: BigtableTableDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -538,6 +559,8 @@ it will be retrieved from the GCP connection used. Both variants are shown: :start-after: [START howto_operator_gcp_bigtable_table_delete] :end-before: [END howto_operator_gcp_bigtable_table_delete] +.. 
_howto/operator:BigtableTableWaitForReplicationSensor: + BigtableTableWaitForReplicationSensor ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -564,6 +587,8 @@ Using the operator Google Cloud Functions Operators -------------------------------- +.. _howto/operator:GcfFunctionDeleteOperator: + GcfFunctionDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -607,6 +632,8 @@ More information See `Google Cloud Functions API documentation for delete `_. +.. _howto/operator:GcfFunctionDeployOperator: + GcfFunctionDeployOperator ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -742,6 +769,8 @@ See `Google Cloud Functions API documentation for create Google Cloud Spanner Operators ------------------------------ +.. _howto/operator:CloudSpannerInstanceDatabaseDeleteOperator: + CloudSpannerInstanceDatabaseDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -788,6 +817,7 @@ More information See `Google Cloud Spanner API documentation for database drop call `_. +.. _howto/operator:CloudSpannerInstanceDatabaseDeployOperator: CloudSpannerInstanceDatabaseDeployOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -837,6 +867,8 @@ More information See Google Cloud Spanner API documentation for `database create `_ +.. _howto/operator:CloudSpannerInstanceDatabaseUpdateOperator: + CloudSpannerInstanceDatabaseUpdateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -895,6 +927,8 @@ More information See Google Cloud Spanner API documentation for `database update_ddl `_. +.. _howto/operator:CloudSpannerInstanceDatabaseQueryOperator: + CloudSpannerInstanceDatabaseQueryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -940,6 +974,8 @@ More information See Google Cloud Spanner API documentation for `the DML syntax `_. +.. _howto/operator:CloudSpannerInstanceDeleteOperator: + CloudSpannerInstanceDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -989,6 +1025,8 @@ See `Google Cloud Spanner API documentation for instance delete Google Cloud Sql Operators -------------------------- +.. 
_howto/operator:CloudSqlInstanceDatabaseCreateOperator: + CloudSqlInstanceDatabaseCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1041,6 +1079,8 @@ More information See `Google Cloud SQL API documentation for database insert `_. +.. _howto/operator:CloudSqlInstanceDatabaseDeleteOperator: + CloudSqlInstanceDatabaseDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1086,6 +1126,8 @@ More information See `Google Cloud SQL API documentation for database delete `_. +.. _howto/operator:CloudSqlInstanceDatabasePatchOperator: + CloudSqlInstanceDatabasePatchOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1140,6 +1182,8 @@ More information See `Google Cloud SQL API documentation for database patch `_. +.. _howto/operator:CloudSqlInstanceDeleteOperator: + CloudSqlInstanceDeleteOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1185,7 +1229,7 @@ More information See `Google Cloud SQL API documentation for delete `_. -.. CloudSqlInstanceExportOperator: +.. _howto/operator:CloudSqlInstanceExportOperator: CloudSqlInstanceExportOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1269,7 +1313,7 @@ as shown in the example: :end-before: [END howto_operator_cloudsql_export_gcs_permissions] -.. CloudSqlInstanceImportOperator: +.. _howto/operator:CloudSqlInstanceImportOperator: CloudSqlInstanceImportOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1364,6 +1408,8 @@ as shown in the example: :start-after: [START howto_operator_cloudsql_import_gcs_permissions] :end-before: [END howto_operator_cloudsql_import_gcs_permissions] +.. _howto/operator:CloudSqlInstanceCreateOperator: + CloudSqlInstanceCreateOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1419,6 +1465,8 @@ More information See `Google Cloud SQL API documentation for insert `_. +.. _howto/operator:CloudSqlInstancePatchOperator: + CloudSqlInstancePatchOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1475,6 +1523,7 @@ More information See `Google Cloud SQL API documentation for patch `_. +.. 
_howto/operator:CloudSqlQueryOperator: CloudSqlQueryOperator ^^^^^^^^^^^^^^^^^^^^^ @@ -1569,6 +1618,8 @@ See `Google Cloud SQL Proxy documentation Google Cloud Storage Operators ------------------------------ +.. _howto/operator:GoogleCloudStorageBucketCreateAclEntryOperator: + GoogleCloudStorageBucketCreateAclEntryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1611,6 +1662,8 @@ More information See `Google Cloud Storage BucketAccessControls insert documentation `_. +.. _howto/operator:GoogleCloudStorageObjectCreateAclEntryOperator: + GoogleCloudStorageObjectCreateAclEntryOperator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From 6d229b240bff849bb83eef00922be32efd340630 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Fri, 8 Feb 2019 11:23:32 +0100 Subject: [PATCH 0061/1104] [AIRFLOW-3707] Group subpackages/extras by cloud providers (#4524) --- UPDATING.md | 16 +++++++++++ airflow/utils/log/s3_task_handler.py | 2 +- docs/installation.rst | 10 ++++--- setup.py | 35 +++++++++++-------------- tests/utils/log/test_s3_task_handler.py | 2 +- 5 files changed, 39 insertions(+), 26 deletions(-) diff --git a/UPDATING.md b/UPDATING.md index 73965a810a424..c3d0ee4e0789f 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -24,6 +24,22 @@ assists users migrating to a new version. ## Airflow Master +### Renamed "extra" requirements for cloud providers + +Subpackages for specific services have been combined into one variant for +each cloud provider. + +If you want to install integration for Microsoft Azure, then instead of +``` +pip install apache-airflow[azure_blob_storage,azure_data_lake,azure_cosmos,azure_container_instances] +``` +you should execute `pip install apache-airflow[azure]` + +If you want to install integration for Amazon Web Services, then instead of +`pip install apache-airflow[s3,emr]`, you should execute `pip install apache-airflow[aws]` + +The integration with GCP is unchanged. 
+ ## Changes in Google Cloud Platform related operators Most GCP-related operators have now optional `PROJECT_ID` parameter. In case you do not specify it, diff --git a/airflow/utils/log/s3_task_handler.py b/airflow/utils/log/s3_task_handler.py index 196aec50f1bd4..ffd2d101d7518 100644 --- a/airflow/utils/log/s3_task_handler.py +++ b/airflow/utils/log/s3_task_handler.py @@ -45,7 +45,7 @@ def _build_hook(self): except Exception: self.log.error( 'Could not create an S3Hook with connection id "%s". ' - 'Please make sure that airflow[s3] is installed and ' + 'Please make sure that airflow[aws] is installed and ' 'the S3 connection exists.', remote_conn_id ) diff --git a/docs/installation.rst b/docs/installation.rst index d9431d54bdd9a..11523180314a8 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -27,11 +27,11 @@ The easiest way to install the latest stable version of Airflow is with ``pip``: pip install apache-airflow -You can also install Airflow with support for extra features like ``s3`` or ``postgres``: +You can also install Airflow with support for extra features like ``gcp_api`` or ``postgres``: .. 
code-block:: bash - pip install apache-airflow[postgres,s3] + pip install apache-airflow[postgres,gcp_api] Extra Packages '''''''''''''' @@ -56,6 +56,10 @@ Here's the list of the subpackages and what they enable: +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ | async | ``pip install apache-airflow[async]`` | Async worker classes for Gunicorn | +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| azure | ``pip install apache-airflow[azure]`` | Microsoft Azure | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ +| aws | ``pip install apache-airflow[aws]`` | Amazon Web Services | ++---------------------+---------------------------------------------------+----------------------------------------------------------------------+ | celery | ``pip install apache-airflow[celery]`` | CeleryExecutor | +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ | cloudant | ``pip install apache-airflow[cloudant]`` | Cloudant hook | @@ -108,8 +112,6 @@ Here's the list of the subpackages and what they enable: +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ | redis | ``pip install apache-airflow[redis]`` | Redis hooks and sensors | +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ -| s3 | ``pip install apache-airflow[s3]`` | ``S3KeySensor``, ``S3PrefixSensor`` | -+---------------------+---------------------------------------------------+----------------------------------------------------------------------+ | 
samba | ``pip install apache-airflow[samba]`` | :class:`airflow.operators.hive_to_samba_operator.Hive2SambaOperator` | +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ | slack | ``pip install apache-airflow[slack]`` | :class:`airflow.operators.slack_operator.SlackAPIOperator` | diff --git a/setup.py b/setup.py index 1cbe29b64ad05..8450d4555ece8 100644 --- a/setup.py +++ b/setup.py @@ -131,14 +131,17 @@ def write_version(filename=os.path.join(*['airflow', 'gevent>=0.13' ] atlas = ['atlasclient>=0.1.2'] -azure_blob_storage = ['azure-storage>=0.34.0'] -azure_data_lake = [ +aws = [ + 'boto3>=1.7.0, <1.8.0', +] +azure = [ + 'azure-storage>=0.34.0', 'azure-mgmt-resource==1.2.2', 'azure-mgmt-datalake-store==0.4.0', - 'azure-datalake-store==0.0.19' + 'azure-datalake-store==0.0.19', + 'azure-cosmos>=3.0.1', + 'azure-mgmt-containerinstance', ] -azure_cosmos = ['azure-cosmos>=3.0.1'] -azure_container_instances = ['azure-mgmt-containerinstance'] cassandra = ['cassandra-driver>=3.13.0'] celery = [ 'celery>=4.1.1, <4.2.0', @@ -168,7 +171,6 @@ def write_version(filename=os.path.join(*['airflow', 'elasticsearch>=5.0.0,<6.0.0', 'elasticsearch-dsl>=5.0.0,<6.0.0' ] -emr = ['boto3>=1.0.0, <1.8.0'] gcp_api = [ 'httplib2>=0.9.2', 'google-api-python-client>=1.6.0, <2.0.0dev', @@ -210,7 +212,6 @@ def write_version(filename=os.path.join(*['airflow', qds = ['qds-sdk>=1.10.4'] rabbitmq = ['librabbitmq>=1.6.1'] redis = ['redis>=2.10.5,<3.0.0'] -s3 = ['boto3>=1.7.0, <1.8.0'] salesforce = ['simple-salesforce>=0.72'] samba = ['pysmbclient>=0.1.3'] segment = ['analytics-python>=1.2.9'] @@ -254,14 +255,13 @@ def write_version(filename=os.path.join(*['airflow', if not PY3: devel += ['unittest2'] -devel_minreq = devel + kubernetes + mysql + doc + password + s3 + cgroups +devel_minreq = devel + kubernetes + mysql + doc + password + cgroups devel_hadoop = devel_minreq + hive + hdfs + webhdfs + kerberos 
-devel_azure = devel_minreq + azure_data_lake + azure_cosmos -devel_all = (sendgrid + devel + all_dbs + doc + samba + s3 + slack + crypto + oracle + - docker + ssh + kubernetes + celery + azure_blob_storage + redis + gcp_api + +devel_all = (sendgrid + devel + all_dbs + doc + samba + slack + crypto + oracle + + docker + ssh + kubernetes + celery + redis + gcp_api + datadog + zendesk + jdbc + ldap + kerberos + password + webhdfs + jenkins + - druid + pinot + segment + snowflake + elasticsearch + azure_data_lake + azure_cosmos + - atlas + azure_container_instances) + druid + pinot + segment + snowflake + elasticsearch + + atlas + azure + aws) # Snakebite & Google Cloud Dataflow are not Python 3 compatible :'( if PY3: @@ -335,10 +335,8 @@ def do_setup(): 'all_dbs': all_dbs, 'atlas': atlas, 'async': async_packages, - 'azure_blob_storage': azure_blob_storage, - 'azure_data_lake': azure_data_lake, - 'azure_cosmos': azure_cosmos, - 'azure_container_instances': azure_container_instances, + 'aws': aws, + 'azure': azure, 'cassandra': cassandra, 'celery': celery, 'cgroups': cgroups, @@ -349,12 +347,10 @@ def do_setup(): 'datadog': datadog, 'devel': devel_minreq, 'devel_hadoop': devel_hadoop, - 'devel_azure': devel_azure, 'doc': doc, 'docker': docker, 'druid': druid, 'elasticsearch': elasticsearch, - 'emr': emr, 'gcp_api': gcp_api, 'github_enterprise': github_enterprise, 'google_auth': google_auth, @@ -375,7 +371,6 @@ def do_setup(): 'qds': qds, 'rabbitmq': rabbitmq, 'redis': redis, - 's3': s3, 'salesforce': salesforce, 'samba': samba, 'sendgrid': sendgrid, diff --git a/tests/utils/log/test_s3_task_handler.py b/tests/utils/log/test_s3_task_handler.py index a5d5f150ba4a4..1d74b42b1114a 100644 --- a/tests/utils/log/test_s3_task_handler.py +++ b/tests/utils/log/test_s3_task_handler.py @@ -90,7 +90,7 @@ def test_hook_raises(self): mock_error.assert_called_once_with( 'Could not create an S3Hook with connection id "%s". 
Please make ' - 'sure that airflow[s3] is installed and the S3 connection exists.', + 'sure that airflow[aws] is installed and the S3 connection exists.', '' ) From b6f85740d488d1b2dc0ea7a30335f2974fbfcc96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Fri, 8 Feb 2019 11:31:58 +0100 Subject: [PATCH 0062/1104] [AIRFLOW-3828] Use context manager to manage session in cli.rotate_fernet_key (#4668) --- airflow/bin/cli.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index 05f440c5233a5..c9acc812b7f09 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -603,13 +603,12 @@ def next_execution(args): @cli_utils.action_logging def rotate_fernet_key(args): - session = settings.Session() - for conn in session.query(Connection).filter( - Connection.is_encrypted | Connection.is_extra_encrypted): - conn.rotate_fernet_key() - for var in session.query(Variable).filter(Variable.is_encrypted): - var.rotate_fernet_key() - session.commit() + with db.create_session() as session: + for conn in session.query(Connection).filter( + Connection.is_encrypted | Connection.is_extra_encrypted): + conn.rotate_fernet_key() + for var in session.query(Variable).filter(Variable.is_encrypted): + var.rotate_fernet_key() @cli_utils.action_logging From 2a963909f5b6413c38a74c9022af1d7514baf67b Mon Sep 17 00:00:00 2001 From: Verdan Mahmood Date: Fri, 8 Feb 2019 18:29:56 +0100 Subject: [PATCH 0063/1104] [AIRFLOW-3820] Add back the gunicorn config (#4661) --- airflow/bin/cli.py | 1 + airflow/www/gunicorn_config.py | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 airflow/www/gunicorn_config.py diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index c9acc812b7f09..c6cc796defef5 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -916,6 +916,7 @@ def webserver(args): '-b', args.hostname + ':' + str(args.port), '-n', 'airflow-webserver', '-p', str(pid), + '-c', 
'python:airflow.www.gunicorn_config', ] if args.access_logfile: diff --git a/airflow/www/gunicorn_config.py b/airflow/www/gunicorn_config.py new file mode 100644 index 0000000000000..db76b6de0c5af --- /dev/null +++ b/airflow/www/gunicorn_config.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import setproctitle +from airflow import settings + + +def post_worker_init(dummy_worker): + setproctitle.setproctitle( + settings.GUNICORN_WORKER_READY_PREFIX + setproctitle.getproctitle() + ) From d11147ad65c8a1d93f9371d484f9cc01938dc75f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Fri, 8 Feb 2019 18:32:11 +0100 Subject: [PATCH 0064/1104] [AIRFLOW-XXX] Move out the examples from integration.rst (#4672) --- .../contrib/operators/dataflow_operator.py | 34 ++++++++++++++++ docs/integration.rst | 39 ------------------- 2 files changed, 34 insertions(+), 39 deletions(-) diff --git a/airflow/contrib/operators/dataflow_operator.py b/airflow/contrib/operators/dataflow_operator.py index 536248c826eda..fa1c6720deb48 100644 --- a/airflow/contrib/operators/dataflow_operator.py +++ b/airflow/contrib/operators/dataflow_operator.py @@ -33,6 +33,40 @@ class DataFlowJavaOperator(BaseOperator): Start a Java Cloud DataFlow batch job. The parameters of the operation will be passed to the job. + **Example**: :: + + default_args = { + 'owner': 'airflow', + 'depends_on_past': False, + 'start_date': + (2016, 8, 1), + 'email': ['alex@vanboxel.be'], + 'email_on_failure': False, + 'email_on_retry': False, + 'retries': 1, + 'retry_delay': timedelta(minutes=30), + 'dataflow_default_options': { + 'project': 'my-gcp-project', + 'zone': 'us-central1-f', + 'stagingLocation': 'gs://bucket/tmp/dataflow/staging/', + } + } + + dag = DAG('test-dag', default_args=default_args) + + task = DataFlowJavaOperator( + gcp_conn_id='gcp_default', + task_id='normalize-cal', + jar='{{var.value.gcp_dataflow_base}}pipeline-ingress-cal-normalize-1.0.jar', + options={ + 'autoscalingAlgorithm': 'BASIC', + 'maxNumWorkers': '50', + 'start': '{{ds}}', + 'partitionType': 'DAY' + + }, + dag=dag) + .. 
seealso:: For more detail on job submission have a look at the reference: https://cloud.google.com/dataflow/pipelines/specifying-exec-params diff --git a/docs/integration.rst b/docs/integration.rst index 93215d7b76fca..27f29d6bb075f 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -479,45 +479,6 @@ Cloud DataFlow They also use :class:`airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook` to communicate with Google Cloud Platform. -.. _DataFlowJavaOperator: - -DataFlowJavaOperator -^^^^^^^^^^^^^^^^^^^^ - -.. code:: python - - default_args = { - 'owner': 'airflow', - 'depends_on_past': False, - 'start_date': - (2016, 8, 1), - 'email': ['alex@vanboxel.be'], - 'email_on_failure': False, - 'email_on_retry': False, - 'retries': 1, - 'retry_delay': timedelta(minutes=30), - 'dataflow_default_options': { - 'project': 'my-gcp-project', - 'zone': 'us-central1-f', - 'stagingLocation': 'gs://bucket/tmp/dataflow/staging/', - } - } - - dag = DAG('test-dag', default_args=default_args) - - task = DataFlowJavaOperator( - gcp_conn_id='gcp_default', - task_id='normalize-cal', - jar='{{var.value.gcp_dataflow_base}}pipeline-ingress-cal-normalize-1.0.jar', - options={ - 'autoscalingAlgorithm': 'BASIC', - 'maxNumWorkers': '50', - 'start': '{{ds}}', - 'partitionType': 'DAY' - - }, - dag=dag) - Cloud DataProc '''''''''''''' From 8d6dcd1840fd5f4e5d80c16090b06393352a4af0 Mon Sep 17 00:00:00 2001 From: BasPH Date: Sun, 10 Feb 2019 12:47:41 +0100 Subject: [PATCH 0065/1104] [AIRFLOW-3464] Move SkipMixin out of models.py (#4386) --- airflow/models/__init__.py | 42 ---------- airflow/models/skipmixin.py | 66 ++++++++++++++++ airflow/operators/latest_only_operator.py | 3 +- airflow/operators/python_operator.py | 3 +- airflow/sensors/base_sensor_operator.py | 3 +- tests/models.py | 70 +---------------- tests/models/test_skipmixin.py | 95 +++++++++++++++++++++++ 7 files changed, 170 insertions(+), 112 deletions(-) create mode 100644 airflow/models/skipmixin.py create mode 100644 
tests/models/test_skipmixin.py diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index e1d7eb491dd50..04f7cf83d6b80 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -1857,48 +1857,6 @@ def init_run_context(self, raw=False): self._set_context(self) -class SkipMixin(LoggingMixin): - @provide_session - def skip(self, dag_run, execution_date, tasks, session=None): - """ - Sets tasks instances to skipped from the same dag run. - - :param dag_run: the DagRun for which to set the tasks to skipped - :param execution_date: execution_date - :param tasks: tasks to skip (not task_ids) - :param session: db session to use - """ - if not tasks: - return - - task_ids = [d.task_id for d in tasks] - now = timezone.utcnow() - - if dag_run: - session.query(TaskInstance).filter( - TaskInstance.dag_id == dag_run.dag_id, - TaskInstance.execution_date == dag_run.execution_date, - TaskInstance.task_id.in_(task_ids) - ).update({TaskInstance.state: State.SKIPPED, - TaskInstance.start_date: now, - TaskInstance.end_date: now}, - synchronize_session=False) - session.commit() - else: - assert execution_date is not None, "Execution date is None and no dag run" - - self.log.warning("No DAG RUN present this should not happen") - # this is defensive against dag runs that are not complete - for task in tasks: - ti = TaskInstance(task, execution_date=execution_date) - ti.state = State.SKIPPED - ti.start_date = now - ti.end_date = now - session.merge(ti) - - session.commit() - - @functools.total_ordering class BaseOperator(LoggingMixin): """ diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py new file mode 100644 index 0000000000000..c0adbd20aa4a4 --- /dev/null +++ b/airflow/models/skipmixin.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from airflow.models import TaskInstance +from airflow.utils import timezone +from airflow.utils.db import provide_session +from airflow.utils.log.logging_mixin import LoggingMixin +from airflow.utils.state import State + + +class SkipMixin(LoggingMixin): + @provide_session + def skip(self, dag_run, execution_date, tasks, session=None): + """ + Sets tasks instances to skipped from the same dag run. 
+ + :param dag_run: the DagRun for which to set the tasks to skipped + :param execution_date: execution_date + :param tasks: tasks to skip (not task_ids) + :param session: db session to use + """ + if not tasks: + return + + task_ids = [d.task_id for d in tasks] + now = timezone.utcnow() + + if dag_run: + session.query(TaskInstance).filter( + TaskInstance.dag_id == dag_run.dag_id, + TaskInstance.execution_date == dag_run.execution_date, + TaskInstance.task_id.in_(task_ids) + ).update({TaskInstance.state: State.SKIPPED, + TaskInstance.start_date: now, + TaskInstance.end_date: now}, + synchronize_session=False) + session.commit() + else: + assert execution_date is not None, "Execution date is None and no dag run" + + self.log.warning("No DAG RUN present this should not happen") + # this is defensive against dag runs that are not complete + for task in tasks: + ti = TaskInstance(task, execution_date=execution_date) + ti.state = State.SKIPPED + ti.start_date = now + ti.end_date = now + session.merge(ti) + + session.commit() diff --git a/airflow/operators/latest_only_operator.py b/airflow/operators/latest_only_operator.py index 113977491cc4b..6fee50965d42a 100644 --- a/airflow/operators/latest_only_operator.py +++ b/airflow/operators/latest_only_operator.py @@ -17,7 +17,8 @@ # specific language governing permissions and limitations # under the License. 
-from airflow.models import BaseOperator, SkipMixin +from airflow.models import BaseOperator +from airflow.models.skipmixin import SkipMixin from airflow.utils import timezone diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py index 52ad0db7c2de8..5b17eea724883 100644 --- a/airflow/operators/python_operator.py +++ b/airflow/operators/python_operator.py @@ -29,7 +29,8 @@ from builtins import str from airflow.exceptions import AirflowException -from airflow.models import BaseOperator, SkipMixin +from airflow.models import BaseOperator +from airflow.models.skipmixin import SkipMixin from airflow.utils.decorators import apply_defaults from airflow.utils.file import TemporaryDirectory from airflow.utils.operator_helpers import context_to_airflow_vars diff --git a/airflow/sensors/base_sensor_operator.py b/airflow/sensors/base_sensor_operator.py index 8f12e23855fee..9ffd6acb89ef4 100644 --- a/airflow/sensors/base_sensor_operator.py +++ b/airflow/sensors/base_sensor_operator.py @@ -23,7 +23,8 @@ from airflow.exceptions import AirflowException, AirflowSensorTimeout, \ AirflowSkipException, AirflowRescheduleException -from airflow.models import BaseOperator, SkipMixin +from airflow.models import BaseOperator +from airflow.models.skipmixin import SkipMixin from airflow.models.taskreschedule import TaskReschedule from airflow.utils import timezone from airflow.utils.decorators import apply_defaults diff --git a/tests/models.py b/tests/models.py index 0a013930c9bf3..7612c3e1c9cd9 100644 --- a/tests/models.py +++ b/tests/models.py @@ -36,10 +36,10 @@ import pendulum import six -from mock import ANY, Mock, mock_open, patch -from parameterized import parameterized -from freezegun import freeze_time from cryptography.fernet import Fernet +from freezegun import freeze_time +from mock import ANY, mock_open, patch +from parameterized import parameterized from airflow import AirflowException, configuration, models, settings from 
airflow.contrib.sensors.python_sensor import PythonSensor @@ -47,7 +47,6 @@ from airflow.jobs import BackfillJob from airflow.models import DAG, TaskInstance as TI from airflow.models import DagModel, DagRun -from airflow.models import SkipMixin from airflow.models import State as ST from airflow.models import Variable from airflow.models import clear_task_instances @@ -3391,66 +3390,3 @@ def test_connection_from_uri_no_schema(self): self.assertEqual(connection.login, 'user') self.assertEqual(connection.password, 'password with space') self.assertEqual(connection.port, 1234) - - -class TestSkipMixin(unittest.TestCase): - - @patch('airflow.models.timezone.utcnow') - def test_skip(self, mock_now): - session = settings.Session() - now = datetime.datetime.utcnow().replace(tzinfo=pendulum.timezone('UTC')) - mock_now.return_value = now - dag = DAG( - 'dag', - start_date=DEFAULT_DATE, - ) - with dag: - tasks = [DummyOperator(task_id='task')] - dag_run = dag.create_dagrun( - run_id='manual__' + now.isoformat(), - state=State.FAILED, - ) - SkipMixin().skip( - dag_run=dag_run, - execution_date=now, - tasks=tasks, - session=session) - - session.query(TI).filter( - TI.dag_id == 'dag', - TI.task_id == 'task', - TI.state == State.SKIPPED, - TI.start_date == now, - TI.end_date == now, - ).one() - - @patch('airflow.models.timezone.utcnow') - def test_skip_none_dagrun(self, mock_now): - session = settings.Session() - now = datetime.datetime.utcnow().replace(tzinfo=pendulum.timezone('UTC')) - mock_now.return_value = now - dag = DAG( - 'dag', - start_date=DEFAULT_DATE, - ) - with dag: - tasks = [DummyOperator(task_id='task')] - SkipMixin().skip( - dag_run=None, - execution_date=now, - tasks=tasks, - session=session) - - session.query(TI).filter( - TI.dag_id == 'dag', - TI.task_id == 'task', - TI.state == State.SKIPPED, - TI.start_date == now, - TI.end_date == now, - ).one() - - def test_skip_none_tasks(self): - session = Mock() - SkipMixin().skip(dag_run=None, execution_date=None, 
tasks=[], session=session) - self.assertFalse(session.query.called) - self.assertFalse(session.commit.called) diff --git a/tests/models/test_skipmixin.py b/tests/models/test_skipmixin.py new file mode 100644 index 0000000000000..9b1230409dc2c --- /dev/null +++ b/tests/models/test_skipmixin.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import datetime +import unittest + +import pendulum +from mock import patch, Mock + +from airflow.models import settings, DAG, TaskInstance as TI +from airflow.models.skipmixin import SkipMixin +from airflow.operators.dummy_operator import DummyOperator +from airflow.utils import timezone +from airflow.utils.state import State + +DEFAULT_DATE = timezone.datetime(2016, 1, 1) + + +class TestSkipMixin(unittest.TestCase): + + @patch('airflow.models.timezone.utcnow') + def test_skip(self, mock_now): + session = settings.Session() + now = datetime.datetime.utcnow().replace(tzinfo=pendulum.timezone('UTC')) + mock_now.return_value = now + dag = DAG( + 'dag', + start_date=DEFAULT_DATE, + ) + with dag: + tasks = [DummyOperator(task_id='task')] + dag_run = dag.create_dagrun( + run_id='manual__' + now.isoformat(), + state=State.FAILED, + ) + SkipMixin().skip( + dag_run=dag_run, + execution_date=now, + tasks=tasks, + session=session) + + session.query(TI).filter( + TI.dag_id == 'dag', + TI.task_id == 'task', + TI.state == State.SKIPPED, + TI.start_date == now, + TI.end_date == now, + ).one() + + @patch('airflow.models.timezone.utcnow') + def test_skip_none_dagrun(self, mock_now): + session = settings.Session() + now = datetime.datetime.utcnow().replace(tzinfo=pendulum.timezone('UTC')) + mock_now.return_value = now + dag = DAG( + 'dag', + start_date=DEFAULT_DATE, + ) + with dag: + tasks = [DummyOperator(task_id='task')] + SkipMixin().skip( + dag_run=None, + execution_date=now, + tasks=tasks, + session=session) + + session.query(TI).filter( + TI.dag_id == 'dag', + TI.task_id == 'task', + TI.state == State.SKIPPED, + TI.start_date == now, + TI.end_date == now, + ).one() + + def test_skip_none_tasks(self): + session = Mock() + SkipMixin().skip(dag_run=None, execution_date=None, tasks=[], session=session) + self.assertFalse(session.query.called) + self.assertFalse(session.commit.called) From 59d2615459bf3bef0c11431f4722ea83ee20eae8 Mon Sep 17 00:00:00 2001 From: BasPH Date: Sun, 
10 Feb 2019 19:59:46 +0100 Subject: [PATCH 0066/1104] AIRFLOW-[3823] Exclude branch's downstream tasks from the tasks to skip (#4666) --- airflow/operators/python_operator.py | 17 ++++++-- tests/operators/test_python_operator.py | 58 +++++++++++++++++++++++++ 2 files changed, 72 insertions(+), 3 deletions(-) diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py index 5b17eea724883..dc6639ca08ab4 100644 --- a/airflow/operators/python_operator.py +++ b/airflow/operators/python_operator.py @@ -23,10 +23,11 @@ import subprocess import sys import types +from builtins import str from textwrap import dedent import dill -from builtins import str +import six from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -138,7 +139,7 @@ class BranchPythonOperator(PythonOperator, SkipMixin): """ def execute(self, context): branch = super(BranchPythonOperator, self).execute(context) - if isinstance(branch, str): + if isinstance(branch, six.string_types): branch = [branch] self.log.info("Following branch %s", branch) self.log.info("Marking other directly downstream tasks as skipped") @@ -146,8 +147,18 @@ def execute(self, context): downstream_tasks = context['task'].downstream_list self.log.debug("Downstream task_ids %s", downstream_tasks) - skip_tasks = [t for t in downstream_tasks if t.task_id not in branch] if downstream_tasks: + # Also check downstream tasks of the branch task. In case the task to skip + # is a downstream task of the branch task, we exclude it from skipping. + branch_downstream_task_ids = set() + for b in branch: + branch_downstream_task_ids.update(context["dag"]. + get_task(b). 
+ get_flat_relative_ids(upstream=False)) + skip_tasks = [t + for t in downstream_tasks + if t.task_id not in branch and + t.task_id not in branch_downstream_task_ids] self.skip(context['dag_run'], context['ti'].execution_date, skip_tasks) self.log.info("Done.") diff --git a/tests/operators/test_python_operator.py b/tests/operators/test_python_operator.py index c22df9d50a2e8..266b84bdc24e4 100644 --- a/tests/operators/test_python_operator.py +++ b/tests/operators/test_python_operator.py @@ -289,6 +289,64 @@ def test_with_dag_run(self): else: raise Exception + def test_with_skip_in_branch_downstream_dependencies(self): + self.branch_op = BranchPythonOperator(task_id='make_choice', + dag=self.dag, + python_callable=lambda: 'branch_1') + + self.branch_op >> self.branch_1 >> self.branch_2 + self.branch_op >> self.branch_2 + self.dag.clear() + + dr = self.dag.create_dagrun( + run_id="manual__", + start_date=timezone.utcnow(), + execution_date=DEFAULT_DATE, + state=State.RUNNING + ) + + self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) + + tis = dr.get_task_instances() + for ti in tis: + if ti.task_id == 'make_choice': + self.assertEqual(ti.state, State.SUCCESS) + elif ti.task_id == 'branch_1': + self.assertEqual(ti.state, State.NONE) + elif ti.task_id == 'branch_2': + self.assertEqual(ti.state, State.NONE) + else: + raise Exception + + def test_with_skip_in_branch_downstream_dependencies2(self): + self.branch_op = BranchPythonOperator(task_id='make_choice', + dag=self.dag, + python_callable=lambda: 'branch_2') + + self.branch_op >> self.branch_1 >> self.branch_2 + self.branch_op >> self.branch_2 + self.dag.clear() + + dr = self.dag.create_dagrun( + run_id="manual__", + start_date=timezone.utcnow(), + execution_date=DEFAULT_DATE, + state=State.RUNNING + ) + + self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) + + tis = dr.get_task_instances() + for ti in tis: + if ti.task_id == 'make_choice': + self.assertEqual(ti.state, State.SUCCESS) + 
elif ti.task_id == 'branch_1': + self.assertEqual(ti.state, State.SKIPPED) + elif ti.task_id == 'branch_2': + self.assertEqual(ti.state, State.NONE) + else: + raise Exception + class ShortCircuitOperatorTest(unittest.TestCase): @classmethod From 8917ce6befc8cb69cefc7f11fd069fe2bd94aa8d Mon Sep 17 00:00:00 2001 From: Tanay Tummalapalli Date: Mon, 11 Feb 2019 15:23:55 +0530 Subject: [PATCH 0067/1104] [AIRFLOW-3742] Fix handling of "fallback" for AirflowConfigParsxer.getint/boolean (#4674) We added (and used) fallback as an argument on `getboolean` but didn't add it to the method, or add tests covering those "casting" accessors, so they broke. This fixes those methods, and adds tests covering them --- airflow/configuration.py | 20 ++++++++++---------- tests/test_configuration.py | 5 ++++- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/airflow/configuration.py b/airflow/configuration.py index 0de6bb9947e2a..c7fd3bd1417f8 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -260,27 +260,27 @@ def get(self, section, key, **kwargs): "section/key [{section}/{key}] not found " "in config".format(**locals())) - def getboolean(self, section, key): - val = str(self.get(section, key)).lower().strip() + def getboolean(self, section, key, **kwargs): + val = str(self.get(section, key, **kwargs)).lower().strip() if '#' in val: val = val.split('#')[0].strip() - if val.lower() in ('t', 'true', '1'): + if val in ('t', 'true', '1'): return True - elif val.lower() in ('f', 'false', '0'): + elif val in ('f', 'false', '0'): return False else: raise AirflowConfigException( 'The value for configuration option "{}:{}" is not a ' 'boolean (received "{}").'.format(section, key, val)) - def getint(self, section, key): - return int(self.get(section, key)) + def getint(self, section, key, **kwargs): + return int(self.get(section, key, **kwargs)) - def getfloat(self, section, key): - return float(self.get(section, key)) + def getfloat(self, section, key, 
**kwargs): + return float(self.get(section, key, **kwargs)) - def read(self, filenames): - super(AirflowConfigParser, self).read(filenames) + def read(self, filenames, **kwargs): + super(AirflowConfigParser, self).read(filenames, **kwargs) self._validate() def read_dict(self, *args, **kwargs): diff --git a/tests/test_configuration.py b/tests/test_configuration.py index ba160f768bcc9..2c6489c04d032 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -139,7 +139,7 @@ def test_conf_as_dict_raw(self): self.assertEqual(cfg_dict['testsection']['testpercent'], 'with%%percent') self.assertEqual(cfg_dict['core']['percent'], 'with%%inside') - def test_command_config(self): + def test_command_precedence(self): TEST_CONFIG = '''[test] key1 = hello key2_cmd = printf cmd_result @@ -170,6 +170,9 @@ def test_command_config(self): self.assertEqual('hello', test_conf.get('test', 'key1', fallback='fb')) self.assertEqual('value6', test_conf.get('another', 'key6', fallback='fb')) self.assertEqual('fb', test_conf.get('another', 'key7', fallback='fb')) + self.assertEqual(True, test_conf.getboolean('another', 'key8_boolean', fallback='True')) + self.assertEqual(10, test_conf.getint('another', 'key8_int', fallback='10')) + self.assertEqual(1.0, test_conf.getfloat('another', 'key8_float', fallback='1')) self.assertTrue(test_conf.has_option('test', 'key1')) self.assertTrue(test_conf.has_option('test', 'key2')) From 07ac00da4079a36ece8b3ddeb265d31afbe242ad Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Mon, 11 Feb 2019 11:15:54 +0100 Subject: [PATCH 0068/1104] [AIRFLOW-3866] Run docker-compose pull silently in CI (#4688) To reduce the output in Travis --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 74cce5f521735..6e2dd7750e49c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -65,8 +65,11 @@ install: - chmod +x docker-compose - sudo mv docker-compose /usr/local/bin - pip install --upgrade pip + 
- docker-compose -f scripts/ci/docker-compose.yml pull --quiet --parallel script: - - if [ -z "$KUBERNETES_VERSION" ]; then docker-compose --log-level ERROR -f scripts/ci/docker-compose.yml run airflow-testing /app/scripts/ci/run-ci.sh; fi + - if [ -z "$KUBERNETES_VERSION" ]; then + docker-compose --log-level ERROR -f scripts/ci/docker-compose.yml run airflow-testing /app/scripts/ci/run-ci.sh; + fi - if [ ! -z "$KUBERNETES_VERSION" ]; then ./scripts/ci/kubernetes/minikube/stop_minikube.sh && ./scripts/ci/kubernetes/setup_kubernetes.sh && From 4f3d0fdd77c4f6f30272ae0743d6741e458d2a09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Mon, 11 Feb 2019 11:50:33 +0100 Subject: [PATCH 0069/1104] [AIRFLOW-XXX] Docs rendering improvement (#4684) --- airflow/contrib/hooks/bigquery_hook.py | 44 +++++++++---------- airflow/contrib/hooks/databricks_hook.py | 19 ++++---- airflow/contrib/hooks/spark_submit_hook.py | 27 ++++++++---- .../contrib/operators/dataflow_operator.py | 2 +- .../mlengine_operator_utils.py | 2 +- .../mlengine_prediction_summary.py | 0 airflow/executors/celery_executor.py | 10 +++-- .../test_mlengine_operator_utils.py | 21 ++++----- 8 files changed, 69 insertions(+), 56 deletions(-) rename airflow/contrib/{operators => utils}/mlengine_operator_utils.py (99%) rename airflow/contrib/{operators => utils}/mlengine_prediction_summary.py (100%) rename tests/contrib/{operators => utils}/test_mlengine_operator_utils.py (88%) diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py index b7dffd0a98974..9d7c5fb38f4ac 100644 --- a/airflow/contrib/hooks/bigquery_hook.py +++ b/airflow/contrib/hooks/bigquery_hook.py @@ -255,7 +255,7 @@ def create_empty_table(self, partition by field, type and expiration as per API specifications. .. 
seealso:: - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#timePartitioning + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#timePartitioning :type time_partitioning: dict :param view: [Optional] A dictionary containing definition for the view. If set, it will create a view instead of a table: @@ -269,7 +269,7 @@ def create_empty_table(self, "useLegacySql": False } - :return: + :return: None """ project_id = project_id if project_id is not None else self.project_id @@ -356,7 +356,7 @@ def create_external_table(self, Possible values include GZIP and NONE. The default value is NONE. This setting is ignored for Google Cloud Bigtable, - Google Cloud Datastore backups and Avro formats. + Google Cloud Datastore backups and Avro formats. :type compression: str :param ignore_unknown_values: [Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. @@ -546,28 +546,26 @@ def patch_table(self, https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema The supported schema modifications and unsupported schema modification are listed here: https://cloud.google.com/bigquery/docs/managing-table-schemas - :type schema: list + **Example**: :: - **Example**: :: - - schema=[{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"}, - {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"}] + schema=[{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"}] + :type schema: list :param time_partitioning: [Optional] A dictionary containing time-based partitioning definition for the table. :type time_partitioning: dict :param view: [Optional] A dictionary containing definition for the view. 
If set, it will patch a view instead of a table: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#view - :type view: dict + **Example**: :: - **Example**: :: - - view = { - "query": "SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*` LIMIT 500", - "useLegacySql": False - } + view = { + "query": "SELECT * FROM `test-project-id.test_dataset_id.test_table_prefix*` LIMIT 500", + "useLegacySql": False + } + :type view: dict :param require_partition_filter: [Optional] If true, queries over the this table require a partition filter. If false, queries over the table :type require_partition_filter: bool @@ -919,14 +917,14 @@ def run_copy(self, For more details about these parameters. :param source_project_dataset_tables: One or more dotted - (project:|project.).
+ ``(project:|project.).
`` BigQuery tables to use as the source data. Use a list if there are multiple source tables. If is not included, project will be the project defined in the connection json. :type source_project_dataset_tables: list|string :param destination_project_dataset_table: The destination BigQuery - table. Format is: (project:|project.).
+ table. Format is: ``(project:|project.).
`` :type destination_project_dataset_table: str :param write_disposition: The write disposition if the table already exists. :type write_disposition: str @@ -1371,11 +1369,11 @@ def run_table_delete(self, deletion_dataset_table, is set to True. :param deletion_dataset_table: A dotted - (.|:).
that indicates which table - will be deleted. + ``(.|:).
`` that indicates which table + will be deleted. :type deletion_dataset_table: str :param ignore_if_missing: if True, then return success even if the - requested table does not exist. + requested table does not exist. :type ignore_if_missing: bool :return: """ @@ -1410,7 +1408,7 @@ def run_table_upsert(self, dataset_id, table_resource, project_id=None): https://cloud.google.com/bigquery/docs/reference/v2/tables#resource :type table_resource: dict :param project_id: the project to upsert the table into. If None, - project will be self.project_id. + project will be self.project_id. :return: """ # check to see if the table exists @@ -1464,10 +1462,10 @@ def run_grant_dataset_view_access(self, :param view_table: the table of the view :type view_table: str :param source_project: the project of the source dataset. If None, - self.project_id will be used. + self.project_id will be used. :type source_project: str :param view_project: the project that the view is in. If None, - self.project_id will be used. + self.project_id will be used. :type view_project: str :return: the datasets resource of the source dataset. """ diff --git a/airflow/contrib/hooks/databricks_hook.py b/airflow/contrib/hooks/databricks_hook.py index a83e577e6aa3a..4bca95d44ad0a 100644 --- a/airflow/contrib/hooks/databricks_hook.py +++ b/airflow/contrib/hooks/databricks_hook.py @@ -79,16 +79,18 @@ def _parse_host(host): The purpose of this function is to be robust to improper connections settings provided by users, specifically in the host field. - For example -- when users supply ``https://xx.cloud.databricks.com`` as the - host, we must strip out the protocol to get the host. 
- >>> h = DatabricksHook() - >>> assert h._parse_host('https://xx.cloud.databricks.com') == \ - 'xx.cloud.databricks.com' + host, we must strip out the protocol to get the host.:: + + h = DatabricksHook() + assert h._parse_host('https://xx.cloud.databricks.com') == \ + 'xx.cloud.databricks.com' In the case where users supply the correct ``xx.cloud.databricks.com`` as the - host, this function is a no-op. - >>> assert h._parse_host('xx.cloud.databricks.com') == 'xx.cloud.databricks.com' + host, this function is a no-op.:: + + assert h._parse_host('xx.cloud.databricks.com') == 'xx.cloud.databricks.com' + """ urlparse_host = urlparse.urlparse(host).hostname if urlparse_host: @@ -101,8 +103,9 @@ def _parse_host(host): def _do_api_call(self, endpoint_info, json): """ Utility function to perform an API call with retries + :param endpoint_info: Tuple of method and endpoint - :type endpoint_info: (string, string) + :type endpoint_info: tuple[string, string] :param json: Parameters for this API call. :type json: dict :return: If the api call returns a OK status code, diff --git a/airflow/contrib/hooks/spark_submit_hook.py b/airflow/contrib/hooks/spark_submit_hook.py index 0912fb40a5138..51a59d460022a 100644 --- a/airflow/contrib/hooks/spark_submit_hook.py +++ b/airflow/contrib/hooks/spark_submit_hook.py @@ -452,16 +452,25 @@ def _start_driver_status_tracking(self): Finish failed when the status is ERROR/UNKNOWN/KILLED/FAILED. 
Possible status: - SUBMITTED: Submitted but not yet scheduled on a worker - RUNNING: Has been allocated to a worker to run - FINISHED: Previously ran and exited cleanly - RELAUNCHING: Exited non-zero or due to worker failure, but has not yet + + SUBMITTED + Submitted but not yet scheduled on a worker + RUNNING + Has been allocated to a worker to run + FINISHED + Previously ran and exited cleanly + RELAUNCHING + Exited non-zero or due to worker failure, but has not yet started running again - UNKNOWN: The status of the driver is temporarily not known due to - master failure recovery - KILLED: A user manually killed this driver - FAILED: The driver exited non-zero and was not supervised - ERROR: Unable to run or restart due to an unrecoverable error + UNKNOWN + The status of the driver is temporarily not known due to + master failure recovery + KILLED + A user manually killed this driver + FAILED + The driver exited non-zero and was not supervised + ERROR + Unable to run or restart due to an unrecoverable error (e.g. missing jar file) """ diff --git a/airflow/contrib/operators/dataflow_operator.py b/airflow/contrib/operators/dataflow_operator.py index fa1c6720deb48..0f7ead15d6293 100644 --- a/airflow/contrib/operators/dataflow_operator.py +++ b/airflow/contrib/operators/dataflow_operator.py @@ -399,7 +399,7 @@ def google_cloud_to_local(self, file_name): :param file_name: The full path of input file. :type file_name: str :return: The full path of local file. 
- :type str + :rtype: str """ if not file_name.startswith('gs://'): return file_name diff --git a/airflow/contrib/operators/mlengine_operator_utils.py b/airflow/contrib/utils/mlengine_operator_utils.py similarity index 99% rename from airflow/contrib/operators/mlengine_operator_utils.py rename to airflow/contrib/utils/mlengine_operator_utils.py index 4a8fa0589def7..c3ca8530dee6e 100644 --- a/airflow/contrib/operators/mlengine_operator_utils.py +++ b/airflow/contrib/utils/mlengine_operator_utils.py @@ -213,7 +213,7 @@ def validate_err_and_count(summary): evaluate_summary = DataFlowPythonOperator( task_id=(task_prefix + "-summary"), py_options=["-m"], - py_file="airflow.contrib.operators.mlengine_prediction_summary", + py_file="airflow.contrib.utils.mlengine_prediction_summary", dataflow_default_options=dataflow_options, options={ "prediction_path": prediction_path, diff --git a/airflow/contrib/operators/mlengine_prediction_summary.py b/airflow/contrib/utils/mlengine_prediction_summary.py similarity index 100% rename from airflow/contrib/operators/mlengine_prediction_summary.py rename to airflow/contrib/utils/mlengine_prediction_summary.py diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py index 44769f62d328a..e91bb7efc4113 100644 --- a/airflow/executors/celery_executor.py +++ b/airflow/executors/celery_executor.py @@ -75,10 +75,11 @@ def execute_command(command_to_exec): class ExceptionWithTraceback(object): """ Wrapper class used to propagate exceptions to parent processes from subprocesses. + :param exception: The exception to wrap :type exception: Exception - :param traceback: The stacktrace to wrap - :type traceback: str + :param exception_traceback: The stacktrace to wrap + :type exception_traceback: str """ def __init__(self, exception, exception_traceback): @@ -90,11 +91,12 @@ def fetch_celery_task_state(celery_task): """ Fetch and return the state of the given celery task. 
The scope of this function is global so that it can be called by subprocesses in the pool. + :param celery_task: a tuple of the Celery task key and the async Celery object used - to fetch the task's state + to fetch the task's state :type celery_task: tuple(str, celery.result.AsyncResult) :return: a tuple of the Celery task key and the Celery state of the task - :rtype: luple[str, str] + :rtype: tuple[str, str] """ try: diff --git a/tests/contrib/operators/test_mlengine_operator_utils.py b/tests/contrib/utils/test_mlengine_operator_utils.py similarity index 88% rename from tests/contrib/operators/test_mlengine_operator_utils.py rename to tests/contrib/utils/test_mlengine_operator_utils.py index a0722650417d2..c6c0849250040 100644 --- a/tests/contrib/operators/test_mlengine_operator_utils.py +++ b/tests/contrib/utils/test_mlengine_operator_utils.py @@ -23,8 +23,7 @@ import unittest from airflow import configuration, DAG -from airflow.contrib.operators import mlengine_operator_utils -from airflow.contrib.operators.mlengine_operator_utils import create_evaluate_ops +from airflow.contrib.utils import mlengine_operator_utils from airflow.exceptions import AirflowException from airflow.version import version @@ -76,7 +75,7 @@ def setUp(self): def testSuccessfulRun(self): input_with_model = self.INPUT_MISSING_ORIGIN.copy() - pred, summary, validate = create_evaluate_ops( + pred, summary, validate = mlengine_operator_utils.create_evaluate_ops( task_prefix='eval-test', batch_prediction_job_id='eval-test-prediction', data_format=input_with_model['dataFormat'], @@ -118,10 +117,10 @@ def testSuccessfulRun(self): 'metric_keys': 'err', 'metric_fn_encoded': self.metric_fn_encoded, }, - 'airflow.contrib.operators.mlengine_prediction_summary', + 'airflow.contrib.utils.mlengine_prediction_summary', ['-m']) - with patch('airflow.contrib.operators.mlengine_operator_utils.' + with patch('airflow.contrib.utils.mlengine_operator_utils.' 
'GoogleCloudStorageHook') as mock_gcs_hook: hook_instance = mock_gcs_hook.return_value hook_instance.download.return_value = '{"err": 0.9, "count": 9}' @@ -155,25 +154,27 @@ def testFailures(self): } with self.assertRaisesRegexp(AirflowException, 'Missing model origin'): - create_evaluate_ops(**other_params_but_models) + mlengine_operator_utils.create_evaluate_ops(**other_params_but_models) with self.assertRaisesRegexp(AirflowException, 'Ambiguous model origin'): - create_evaluate_ops(model_uri='abc', model_name='cde', **other_params_but_models) + mlengine_operator_utils.create_evaluate_ops(model_uri='abc', model_name='cde', + **other_params_but_models) with self.assertRaisesRegexp(AirflowException, 'Ambiguous model origin'): - create_evaluate_ops(model_uri='abc', version_name='vvv', **other_params_but_models) + mlengine_operator_utils.create_evaluate_ops(model_uri='abc', version_name='vvv', + **other_params_but_models) with self.assertRaisesRegexp(AirflowException, '`metric_fn` param must be callable'): params = other_params_but_models.copy() params['metric_fn_and_keys'] = (None, ['abc']) - create_evaluate_ops(model_uri='gs://blah', **params) + mlengine_operator_utils.create_evaluate_ops(model_uri='gs://blah', **params) with self.assertRaisesRegexp(AirflowException, '`validate_fn` param must be callable'): params = other_params_but_models.copy() params['validate_fn'] = None - create_evaluate_ops(model_uri='gs://blah', **params) + mlengine_operator_utils.create_evaluate_ops(model_uri='gs://blah', **params) if __name__ == '__main__': From f4374eda3e270367cbdd70ede248a6b1b1a8d23b Mon Sep 17 00:00:00 2001 From: BasPH Date: Mon, 11 Feb 2019 12:28:26 +0100 Subject: [PATCH 0070/1104] [AIRFLOW-XXX] Add section on task lifecycle & correct casing in docs (#4681) --- docs/concepts.rst | 52 +++++++++++++++++++------- docs/img/task_lifecycle.png | Bin 0 -> 12919 bytes docs/img/task_manual_vs_scheduled.png | Bin 0 -> 5806 bytes docs/index.rst | 4 +- 4 files changed, 40 
insertions(+), 16 deletions(-) create mode 100644 docs/img/task_lifecycle.png create mode 100644 docs/img/task_manual_vs_scheduled.png diff --git a/docs/concepts.rst b/docs/concepts.rst index 7aea8dfe3c086..2791c46eda31d 100644 --- a/docs/concepts.rst +++ b/docs/concepts.rst @@ -18,7 +18,7 @@ Concepts ######## -The Airflow Platform is a tool for describing, executing, and monitoring +The Airflow platform is a tool for describing, executing, and monitoring workflows. Core Ideas @@ -277,10 +277,34 @@ Task Instances ============== A task instance represents a specific run of a task and is characterized as the -combination of a dag, a task, and a point in time. Task instances also have an +combination of a DAG, a task, and a point in time. Task instances also have an indicative state, which could be "running", "success", "failed", "skipped", "up for retry", etc. +Task Lifecycle +============== + +A task goes through various stages from start to completion. In the Airflow UI +(graph and tree views), these stages are displayed by a color representing each +stage: + +.. image:: img/task_lifecycle.png + +The happy flow consists of the following stages: + +1. no status (scheduler created empty task instance) +2. queued (scheduler placed a task to run on the queue) +3. running (worker picked up a task and is now running it) +4. success (task completed) + +There is also visual difference between scheduled and manually triggered +DAGs/tasks: + +.. image:: img/task_manual_vs_scheduled.png + +The DAGs/tasks with a black border are scheduled runs, whereas the non-bordered +DAGs/tasks are manually triggered, i.e. by `airflow trigger_dag`. + Workflows ========= @@ -753,7 +777,7 @@ It is possible, through use of trigger rules to mix tasks that should run in the typical date/time dependent mode and those using the ``LatestOnlyOperator``. -For example, consider the following dag: +For example, consider the following DAG: .. 
code:: python @@ -786,7 +810,7 @@ For example, consider the following dag: trigger_rule=TriggerRule.ALL_DONE) task4.set_upstream([task1, task2]) -In the case of this dag, the ``latest_only`` task will show up as skipped +In the case of this DAG, the ``latest_only`` task will show up as skipped for all runs except the latest run. ``task1`` is directly downstream of ``latest_only`` and will also skip for all runs except the latest. ``task2`` is entirely independent of ``latest_only`` and will run in all @@ -825,7 +849,7 @@ state. Cluster Policy ============== -Your local airflow settings file can define a ``policy`` function that +Your local Airflow settings file can define a ``policy`` function that has the ability to mutate task attributes based on other task or DAG attributes. It receives a single argument as a reference to task objects, and is expected to alter its attributes. @@ -848,8 +872,8 @@ may look like inside your ``airflow_settings.py``: Documentation & Notes ===================== -It's possible to add documentation or notes to your dags & task objects that -become visible in the web interface ("Graph View" for dags, "Task Details" for +It's possible to add documentation or notes to your DAGs & task objects that +become visible in the web interface ("Graph View" for DAGs, "Task Details" for tasks). There are a set of special task attributes that get rendered as rich content if defined: @@ -863,7 +887,7 @@ doc_md markdown doc_rst reStructuredText ========== ================ -Please note that for dags, doc_md is the only attribute interpreted. +Please note that for DAGs, doc_md is the only attribute interpreted. This is especially useful if your tasks are built dynamically from configuration files, it allows you to expose the configuration that led @@ -917,14 +941,14 @@ You can use Jinja templating with every parameter that is marked as "templated" in the documentation. 
Template substitution occurs just before the pre_execute function of your operator is called. -Packaged dags +Packaged DAGs ''''''''''''' -While often you will specify dags in a single ``.py`` file it might sometimes -be required to combine dag and its dependencies. For example, you might want -to combine several dags together to version them together or you might want +While often you will specify DAGs in a single ``.py`` file it might sometimes +be required to combine a DAG and its dependencies. For example, you might want +to combine several DAGs together to version them together or you might want to manage them together or you might need an extra module that is not available -by default on the system you are running airflow on. To allow this you can create -a zip file that contains the dag(s) in the root of the zip file and have the extra +by default on the system you are running Airflow on. To allow this you can create +a zip file that contains the DAG(s) in the root of the zip file and have the extra modules unpacked in directories. 
For instance you can create a zip file that looks like this: diff --git a/docs/img/task_lifecycle.png b/docs/img/task_lifecycle.png new file mode 100644 index 0000000000000000000000000000000000000000..fde65882b06a4d13b96d05963a74b8bda00de972 GIT binary patch literal 12919 zcmeHt^;cZMvMv%dxCD2X;2zv1NN{%#gS!TIcN+o(cXxO9;1C$x2X`Jh>)gA}J@>5j z{($#>*wcGf@7~qbRsB`%t}k3kK?)fW9}xlq0$D~{Tp0oadiedG4<7dY*XECXGz0{a zw3V2cl8l%bsgk3exs{C>1cY>WvId-{%E%|6jVQ;GHTAE;8tH5Ze?Lz{9^4+#Siwnivn7Pt#jBf#7xao~ z%TeCWVL}9O zSbMvbEYj%@u|aNrEkDpW?)DD2Hl(`sQ*xZxfTIdEqFW28MiDc1%syRE6`P1nuXJ3Dj(}Y)S zbx~nB!aZzag=479^P0>LBSf{V?Ae?lAyp;|3y;KY?=d{*+<`9XuW?@t@|F$bHTc|@t+^a9tq|1E4Xv}Lb1 zH|wb=2AOjPKiW7hwSe7H28kjBgCcxOTx#tDYX1w^(j7UZkLa$)2d?hLTG3t9l7^pL zm&O^*RuLO#tg|c>X3!r6mrPg1X|RbCAgW z?PPYd>GW0+V%J((li#eYG|D2&GO*>P2%rH>k$o~#>^hC(Ta3!>bPO@?jGcNYoP%ivRh;%Rv#yFCP;w%gmBP@t)Mw&k%;unQh*oSYV zb@9Q{ytNo5qFiJy!&So~J3NjkR#LVUsPVQt*q7)7O8C}4GICuHXy@U~^1oS<^Wja1 z7UfG=BD7*vh<~5SwxsYR?D(NC^<{eBk_49Jb5yvNF;g1qV2^PP2P0;pky8y@IaJvI ze)Xq&zs?93LxCl@tT1-_PiIgX_LRV&EAZjHt~Y{yu-y;=IPqTXnQSA{k8B#D@PkDF ziN9Q$@G$u}X%YrKybb~!e9@oPc?owpN{n=IuRLyV0+sOPeyx70{+s^wev^JzW15;+ zadN!GEE&jOni#Q3iu6)78Od-G8HT?MOATubD~`Ni z=|y}Vh{cdYS29;l`PQhUO`A%qm*iVYya2=(mj7k0f}_Mxf;_J>?=`=ut*x!D-Kh;~ z=-0k#pl`Uf%wF&;zVRy=NScC z9`;?hHR%)7UdZV48EvZe<$gS0=b*reGg+?>ShrsP zZeqkfY2GN`F+rpgQ4w1)>{@Y(x9aA4>q_S;w8FV8P{&=nzk;*8xVq`|^UDX`uWrRI z&3n?$*AC+jy65i82rie-7`$p<^c*F#JF*5QkF!^$&?N6;;-ZR)9Oi=edR^)4OWZto z5_s7>O~J#3@ywFTQL+RL#`Edl40Hwbob;NSH(ci22#(cHesAB7u8(A&>!12g1fHRv zu}Qb9h$IE)kR#ykVthWt56~_82W=&z_)OCK? 
zMBDV}4D}Ye_juTSI=y)s+Z+%Y@(X>dW4$#AGHHxDM5;#o17m}D0B;IY2=_|3gl>x0 z|FIO=6U&B(5q<;zIq7tG()`@C3*G@aK9~^RV+afy-y-}tAW9)RCc4^t-Ivf?+Djay z6KoN@7ue}Q z+^%b-4tK^jCX}r5HKZv7ZS=RkSw3sm&6|UmLqpM3#BY=vpM71Q(?3Y|&Ib`8*kOuc zq*86jJ4!24Hc%f^D#}*M`KDrI)Up+r^cEvHQR_-`%3v396~)gc%rzF?$RlMon;S1u zPRSG`+J0@!(dBtq!%C!UQ9)5$NFU8?WzLQRCUjD~U^1s@j2(~tF`qZRzT|ywc-{{% z?6DSUVi9guzUN>kbu!zlEB{fhrnR0&`YpIvY8+i7+`wGQ+-}@A6*IH$vjr(Pm75ze zFSEe;PjQ0;3SQm`Z|$vm*OIz=)fLutyT^>fj#}DfbzbruM!QMw9|X6Nh;Rd_5uq^t zFbgo}7L-#<8pRs4bnQHIR(sp&ijlpMzUjeJ+f&xr0A8=2lTfE6C$0&JJ@auuwuL~I zss7?~*uyH063!0eeaBLRyBRn$nC_#F?6~Y+s`D!vSj?xywWR!dFZNZ)RyT+lgV|c= zq{e-Ho7pU&_G-6s7kf7+-04PIttn>S{gJyxIz6*Mas$A>8Ym%PDkfi2j!XTvI!8%h^j`y#ovAceXo1~z%vJ#8j;aQk+>1Wtvrlc)#^p5}nVuS+iLK>I1o z-we)>caV{!J=lQO3_p7O&CQ`MtO?d=enE9NBi*55F6{FslOENTAo{-rEm(*oyF*Sx z*o~g@?c+jt{i)7GZP}`GECb>qDWS&Z=AwHPhPGuy!Yc9y-9bZFO+b(~KwM~Xu}pXQ z8veQZly(Auh>_RGVNMnevIxx4OFoE@&_lRactsv_{o%;`M1=7EX@Oubt>pv(@rmm1 z1u3IUc>w`IwICxdqUsKLx(?S)DFKGNfb1jN`@u^Z21!9{D9RcMgmi*s-TzVigEj(> zbw7;PxIYVxqWHa+bkE*gVoUY@-0gnR$u`aB3%lLj`lSBC{?*3Wr1PEgTWf1;X=|%R z-Ocf0&NoUbsBmHOIC#o`YchGDGVQicr}k+6t>d3hQ$bmg(op>SsDE}+e3%th;FA7; z;V({#{_l+>{4f73oBu8GA7i7?q@j72|M#GOya|^q4%V?&zeJqP8De{hK;C~SK zKGrZu7=L&K_Mg-Hx27B3pCc|2`9Dq;!yo;xq6Gg>C~9GVfLN5{;8`<}@mXEDzrX!> zu_`Bx8nYY)YI(CP@XA_nH*24Nf1J2!xp-&J@tJG;i`<$3%dR=aNtvrd;Y-`H`M~3x zf=)x@PGCr6_tyve4`YS&78r|LG2y$`z?W7+zRZmdU6GCJM}D0w_;G9~i!298@Lkti z2hYp(+8`#(<@0kKr_|Sm z_GrB&RKNSa?GZ&^M~7pb%~tmVF_PArGIsF8vFn-gQhkuVXmhQ|U*6ZuIhWvDp(_Di!(8~MmcT=Yt|I48w^O?`N7mi!Uv7Q%p z%d#caMN)puVX~WU;b*sXtF;RK#aN#0u!G3_{q2U=xp5|-kxu`<7l&!}Y}fY8sn=m_{mj*FN1D&Mji_}u9W;;Xdl4A9 z`SyA@ptEtAvhBV9WwUhgdi~tj4Ls@ASw}du*uKZlXFo;Cy4_*a)Fx$5?v>F$39q8uf7FW0YFD99Xzc602RDh?XVx&CHw z`gS1n_Mo_oWnxctnlS{{J2J$wzh*Yu)^M^M!E6DzVD ztMa`m%jGt+J$}pFd@kg)o5(}&SuFy3TAy!=p^Pg}D&_+_2zaFM{9cX?aN5pVzt^01 zK23hBsHMLO5RMv<$Vuep+Gsm%)Zs;uc&P{qVF4C32DD@RWV0i$wuHB7SWqAEha=Y1 zJu$}7sV|hT5C|plb=UeWXYe??ZLQI!UFC?muU|bunfRjP4%G0V0`8`(GJTZ)7l8Wt 
z7toqM&?izs>7f}mukUd+9VVD1$~~oaJr~$qQ877$ZvYgtyu`R~qL6lW9!mN$V5t8a z8oTK!Slpqt+~>Kw9wsi&_l4d(T2c>ZQ#+j<){pg@nlF>9+Urp#UwOQUn${f0*s|(k z>ev(nA9l7^7CT*QZhq@9uYa<3dj=2T+~WJZn~zxxnc1ynEP2=9P*y{mUWo1aY4eS# z^Yx}G)hK_~;35Erk;(6242QdY3pr9w|1u%o=1@g*n7b1kU2xg7YTLG5s?lNn;I?rE zbfe2=_Itg`iTrDXfxFJT4#)G~5=@Tqj(e0&LPO+hz%d3P4Z zyMxlAIK`)3*A0(-$7GbMt-el%YH-xT^K&mwr_%Elsvm8XhG|aO#nG|exu=|ocv>ae zNumN@30g$HLF2mX>h`-N-v_)KKrinZrw={%AVGRHDCSGbJ)WDq^L98*%2Fca?D4UT zmqX@gdeisnp&X%iFa1bnMNV`v&D3_HtosKFCrwk&@pzabmeDMsL;7~*>G>j5NO!Me zF8fUNvZ#}yZrIfOsLf)<>&Qc+eH%NH$aTpGnsf{Pe|{IL8RK$Ap?9>p`ReMh<}j2@mf;Td zvO^<8S9MHMJM=6!DS55-=S&wOsEhpZps}d~K$kj_H>Q7EJD~98RQ1CfNXU~^>SW>F zYm|Q{!Kl8bD~?1ucp6b1>iQCK3&jtZR@7Lm<#x8OtE-QDmQMF zdfylYHPuP?Z(Z=;)T9y-j}mU*BGaB?4PGx(seD=ZkBajnG&+vd%(98DXN&A^NOseV zZtIH#0D-scK!iXsQ~_t1+Jyc3mQP!$CQMNS-%k2711n=DhK=i>MyMV%xW{ zIfId%`4>N_v$%(q9vsG5hp-fVj#MJM-r+({e~e@gXCuUNe0oO}J36^!|Hqy9sSj3y-9jRdAK#JhjvjTa z41Xkx`02dM@1b3KiViegoVjsb=wM}^Gd3a{eHfsEqi+O(w+KgQFYJ+~jLnm#95-&Y zFL>8d)-(#n0YI3Ewcw|yy3b^ zi;j(}M*@@`M3c0bUPDR3_MNc4=cCX+yh-EjUw=F4!k*0x{;})gtu=Y=dV4uHpw6Ws zdl^C~n)+R-r*moMk$)L$B!8aIfD*nku4qc~LW{H`*uF0D*S)y_#L6V<$176Y^*^P6 z8AAc0Re4e~Rx6BI^~qpoc}aIP0o<8Q{@9M|wSB>+q^fvo!I2+2Yulf8Kb6aX(usv% zXQ!giTm8omt-pc*D6I31$lr1ra{{^ISh1 zz$AKmBt6mW#lUSdm>?y$Z6>O2aak}sSOvwol)qBoJ!@fr;ea8YGcKgz-$E#0B?Qj= zjX=A0uuuA4r)uyx4ccMocoVl*tV|Ajv)qkt9AqCuw%1Puk1gB1Vg&Z0Q%!Gi11(CN9 zO%NWmWVO@&>r#1=N1^xPNIBF)T5iPsJ*pX~!{mIi?+Y@e0!ERSU4Y-$Zffv|k0or4 z9fhu{+9o2Z#3G~)&BpsXC(bQ7`S7g(=I)b{~@^ zvf)iG>o){Y1h&N(#qh8t)R;v@s!vmUTi8;8EJVag_8C_ME63jxXWrfRj(wn4N{sgv zmjxC`GYBr-?j{m58hR-pfVa1qDiv?Sen8`z!4dj!b*c2KfZ3;Hhk4hd?uT@pz2;na zyL{f!ecj5<#$9I<;}AbPrFQf~No4s!px)x8M*f7)3hM+vz(p)FFpG~6H>(BJb2>00 zQvW({HvkfEjFb~KqP|#?>-X`B#9(6XNsjv*e22ydizvEQXf1`N%abwPJ+=Y``Hk8s z!S}2;wRZDnchIRLCehc;s_$3r(H(b{0q(5bxIcVTg_O~ZIvo60AfhCth!tlZ>bET1 z>@c(y$G8~Kq@u*&aXjf|F_&6dy9BP`ilBS>9!WJteF1|Tw^g>U_tfYpSczZ6`q}ZQgBFxzzFZT8eh4Vo zT%msKSz^~Ok0U^xMeim25LVR6!1^g}(a}D`f#bkV)*H8%Sc- 
zP;VaMG~&i!V_rE&MwxhAwy;g830-&CL!m+aJbetT25iz7-;!5G|GcLG>tL+y={$-* zyiPpC{cUx5x${oyr*`x>%(F$~CJK3bNps9uZ#z}VOqV%XU90P7>i9#0Mb%zakrX)x z?!%f~3NP6&CvQ5STos3VkC^v8QT4~2qAPy-jNW0*etP41kNcc*sMO3R>b#6o$R!@z z$NoV1)w-cY@k?QPQH8Cg(KP>O0llRqmQ4plt*pA!X=AYi4$#q(#NgaO6tBrAyhPtn zCM-}Su@Qgb%U}!4c-TtAeYBZ32X!!lfIjH zi4--qk!%e~C|DR=S85Uq(J zOuV@Bb~~J&6~dUa+|%b^6K{;<bE88eyp0ODR$jQ2R3nd8Z&`GE$@}9K}!=3 zhvIMJy2$(d-DNiLj(;64TEM_iTS*L<_q*a;L?QhLRV^uwBW~G5Sg`dB>_7ilWHE1u1GMq&9))+ zj&gVur&gphoOaxL`@+9Co z^a7rgc@lSqgvR2lU5$L?BQjdHg?Xgig)2Vsk>Xq%-M)_ z0N)dd&zmZ@xIyp7>dS_zJZSwn0#di_obpR9q(_5FK@5u>@i*S!FR=CmB20=&b~iOv zO5!amVf7*hTw;*W5&KQ}7!2gM1BfrXXa@;gK=!05hrw;fo2okiCm?ZFzr)~dgVRT# zYZEZ~N2UqiKLd+@G4*@ulNjYz>TT4DSYQ;Y0zDa@Xc!9L!Ou(2RKKKL(yipN_=Usn z)uqVu8-8`XP+2U=eUi9zB#F@YWuYBZhi)w}D_C{p2c7<5@OhiT=cWQ8qSO$wOE4xhhCyGfIMf0EVg-}F! z@xpnmwzTSR?)st@0LL1PKHVKD#C3%HK&S(|^Nu?v z4P6#s_4guQPXU?s&OEu5LW_yx2EnqSo6hXTMzv!zWgN1tAJF`28m9 z&ZV@Vv~I6a6`*n@U$9|}T5csEs>bGP7>AW|Ua46~-55=*Wg^r<6Vt-J&G`8(Ts-z` z)}yshqCKNgH4DZqA)CAak>LKkbVS8xV*dX3WXg-Wa!HaP6||r!U;2e@b&pm3L~CD- zTjb+hj(BeeuL|C8`6_4Y6@Au{agR2G)MOv~*+Q;%a^zkq_g}Wt_InP)_dCtD$C`8> ziC&KRLi!9znsW48pu%jr*!uo-mo2#IfsrQ{1rdjjQy zdF>#1IaiI}tC&C8k)wN;StcGzCRcsT26z}Xv|kHZXkz;+eY3?{`2094$cLT>l=N(M z#g_r5-fARa-rx~e_Rrm2l|EmR zsE3GK`0PrBAcxbmIJSeu`D1lrdv|+zTu+<2UMgZI`{}6K6VdKEyZJoi;Z4@uLHFdA zCj3r~3nTZ$M|4B_1d`^1{<16P&+vFOIpj87F`O3BBPn&HjqW^8q$ot?CaQ0zO7YYd z6oN5<;z#iudY9s9rYf|ZueB=`ej|QZPr0B5&zncdVb_<(X+O8WF(=JUj4YzKkY0UH zAhe06`} z=_!js^db*!gJ;#08;W^IjbOdm(3%WvQ2VjU-r(q*oA1%+s-7tfqq&yvCYN2gV8)G; z$*VC5$G33Autu^tEEmEcmB#L5%w)g$yvji%zEL<;-F5U4-KOBNG0mzW zw}ki|53_siUa%VimqtAjVM1NI~>^t;q?0(C}?77_d~cb&oV^zyhT~?=OcOiD*jkdvp=Oz zWEA(!%?fY4#(Lu5EJ6KM;^$wxab5M2Re@*Ednlq1Y3M$xXmcPM>666ze(aFkA_AG1k6q7u5lFXoZ;P-YU61NDg_8t=RM9jBKTI+t; z)WZ=3L#@t#3MCFV6j9I2#9Ax#MtLZ`B}yN42qT=w`o_7?4!G+7QVy10u;eyB39Q{6 zv^e=36X=EV<)pe``qI5J`M{mqO;;yr>K~~i=yzF09IHAf%Z-Ok`o&}RuIQhktu+^< z<9+uw2w?Fgk)Xy9HR8ZHA#v!Uj753Sv(+fhqIww++@9;prkPDb(K?nAP5--dh+&^c 
zCXoD63JBj4?FA47QR>+tlXWfwcK_1qq>pL6)mX2!Qh9CT1>Qp0YV!h26tfQqcC1bm+ z_Ts7U%O$=!oC_KLa_BTo_WSQK?}M%C`OG|IuM2I9Xp8Ye^med2*9kZH(gP?%7KGB6 z$H5CPT5RN;cg?4G&n}S$$&_8|&eazv@`Gmy7EI^`_pA2Q%lsO#L)|>c13@4xnNBvq z?G3^1Is+Oph!24Qt2Zo&lPyoa^@zMHnQ2RuUpmPJsgJ=rI10Q@1$c;qxnFFs3!OC` z^%opcypK@c!iND0Z==(Ai}D~B=#4F48hCBiU<;W{Jh4S9f@GFBw@Wx6-OZ$R9Hpbc z_~-Z7X1%!shyCf22LvV}xAZE-{EL)gjqn3h8o1+F7p>?d9{Ue-2v1QuvL(Zb4c`}2 zYM~2n$@_ZALXD(jzDM3sW~SZ5FQn1Cf-d8N^+n+1E~gM{MrKNuHtvWp%{l#6`FBdu zn5%*>6{KIJToYFH--6oD=tR9n)PLc#?Px zXe~F+e&*0|52BHGZC9ZBT)an4Fs(1U6k?GE3BS3T*0A{;Zj2aj1sgtr*GY|3x;jCg zPvTK0{!Nx@DA`&Q)#N=A)uuW-Vk2*~t;vjk3=S3?vV(e97Da4!@N^2#ZK)4&Bn{wd zN~KlsN=0MijGV!c{rK{!-UIk(li^bH!(yZN%?4_fX~JL+R82ytx$GR^*FfxZ4LQZi zT&E~*XdjjFeOYBs+lhC5UY!8F{RFc$vt?nK#)jjDh#R=9Z2Gw^5-6%ax7bccd&1Hx zrj)}az9IAX90l1hqQ1kvx1c=Y17sQ;Z<6&Qf*LV$SyvLN-)$uVUOlQpkw!GKY|6=X7 zJ2pay(N>pluQ|)(g8$Yu5X~>i8zsAckdJF}Yy@8K`aYd%jG_|D-3Y6z^M3xSRAcSJQE^n8&P*}vv!9Coc0Ps4>V3{yQX^CVc= zo=?`mohR3cO6@}xYEO~}+c?#Lb^KtA*Q)#mabpzk-p@Z^Tat}YoI!foaSI3vR-?KC zL5pkOkAFaC(`LE{QXn&z3JusKWEn(zrXb^WcUiwJlzktc+cf-&5xt7Nu#(- z?RC~-k*@!tk}rl}3P?tR2WZzZoRBPA@M462a5xL=b^bU}61Qk_|%o#bkASWA~_cdhVmB zvzqklu0iZ~R!MQw>A3^y(OeT~G^6R*T`A|tO&p$8fflcZrr>)vm_WP9Ak`kU@2I!dmrr$w&K@Z;IO4-W0k?(yhYu9q84MkRD7glh@vn7DDwhk^t(H?Q)Q+! z&ji;RPLuK{`6-NA>h;b5EyoIGms}utp`M;JGO^(jmasN{rm~6fY&)7_og)wC+9nUgdCc3BZRn5eN#x9}lKCvc(jy}(R?qdd; zx+dm7<#elmeJBd=9wcT$ZTeg5`}CV^Dr?mFaP*%m8AY`>Tmq~&hb!T)MZ{fCzkX$(RV}}c7ysaLER^pd!||T2GVftNBD)N8TKFBaWU^Z5d&9xnnf;Od zbFWjLt`d%J<{r&pLZ1~;xu)NZp23DK1B1Dqpm)Z~$)haYo|!j~e8fYMT$igy=YsZ? 
zHGz-0Zi}9ow+ipfoihWdxm0J1cC0fOtmUl*M5Q@gGYT%wM#J{dEpYaBS04r z7E|mhd87a0m0BOa_yNz0aeKuv2NoaIRfmnNUM9AN)*FEI#ALPl+i7F&=5W;mka%H$ zSskvB#!QSP0a$N!DWK6?&s5mTUgT=1NCgYCR$xLdJ9hwWXYB@mc5&^b2VLc%>pM0l zbS;pxm7LNW&zC3yD;sewjrJI1@^LM{zOw;vW4q^XNn`!`N6*IT&Hc<8RydqA z?ntK){ZG#8$iJV5Btm4f*qj$CiIfdG(9UW()pucdm8|Sr<~UM$#jOgNrO{c0e-Ym^ zBlG|yX}rQ4>|I!i`CQi1xpB^o*ZWCrbdwL2lkoxYkACRVa&C_iGP z3F3$wa_ePuxSJ=Vtpv^By=JH1+TjoPS;VPX_q=l==0oJwCMhe+`n$`X(@qxEV7(_v zn#iF&CRbAy|7UWd@IP{S>ZRU^F}Ip# zqyLjy@y~xSlJC7RXKOav|B+VX_5NzxShd2h|7l|XU+{nV?my{4|F@<7^9E7*`-oIp UokaDW^#vg#p&(uXY5{YGyni7 zbzLQ3W$ghh57yJ83O7{>fMPZv+Am*y{)q&6?h z#808bfC6BlS#Huce!9Sy>|UMd+S_kR>vRhMsEFVv@+@80!M8t~n_&RR{&hf52*IpI z%l7NF-J36bTIJN!))(#heXEn1!tCT$iCG4Zq@HJ5?yq(B8L3avGv=QtQX*#j^_&lkeh%5+dE=LlC%T)*KFNT^!r z$dfsW=YFEbV@UgX`#K}&e&7>E0Qn;%=war`AsfYTE@m_{&$BxQuRSj z*s1g77!c4~P4EdG5j*P>U_bF{V__-h*_%Q#5@is9l^Ut7bgp_D!`EcO$MkBnXY@SW z=``mt<1?8L-6Kyv*CA)P-;>qWmRL&p(<;61(X8C6b7Q2^PO@0fBnbLr=JRBanZL=d_1-M* zx1LHiSkx?>3(qCjw^To$f54?~CELD#_5K;JPVKziLMWcmb!z!t)0&)jOC-@j*Ok?0 zssOwS0s?}#j_ubgA+0y8fT_Y7#fxCTmJUGvg20|z#AwHRJqA#&M$%#2_lez-fW$IQ zX&?U@#1f6F12OSMnu!znTM~|n zTjJ?1yXN55PslNf#mI0%bzM>p2T)%;kz zLOorDhAg*Ve@(yonmmHqRmY2$Cc|ru@fU5c5zy^vZn6KC;1mg@#2Ch-$U3Z1R&ozU z*2++&1e5MJ#?NIl;?KWKmt&o@D1D{_vQF|neq}GPmgy{_ zBhl}P$_z<1#O++$>=gf5X>s0T0A z`gw*x&*_9oZOBMS%a-S+?gc*Jqs!3>c`X;pW}5K5$Et_F=eTE~$EnBPQJ^kOi-$Ec zUk~q%C0*KcLt*|ZUK}3>za6(<#$skeKa+C+LS7+7l$$wIq+h9Dr{ALA@XHm0UE-bI zG`a`0Mo^O%#*IeSg4u$0&%@twek}y58N7j-G8>6hP)(Umg-lIbTU$S}?y$x-^jIG@ z2sfO-3cd!HAEQbQk*|GVnK1HMlUdnWiCNECcf|rP9h%az@xvbve)t{HACVlL&Qjts zW^FxDC(1ZCoHkBm7VImi*r@1HAf$Ltkx}u&SM0~pmWeOUSD{sTp{4a}D~TU``{akj z(fpC)@voy7TevZ$vQzz{H}Ep9Qt%jfeCd6JMrS5U;yo)naZz1*U5roSI7CglR&E*S z%=Vn^rzA{jOJ+)r-See~CCt(@aHcjYK?3*E7Al5hMS>5B53%BCho24@hom?e%8!yi z3&y=mp70hM=Zt%m>}YePSoJydDfEdX=OypuE#H248*2c4=^?HFnUI=)(tP86yW^5L=Pg4=?-lY`U< zw9%k_n8PNqDy^#DzY51X7vO*5FXVrBR_413TCRR`mihbi+!FGY5}|^AK)GM@hOY0C z&ybJpUf_2!zhAy|3g$|72khAlQA0@eP%Eha?L(N>&)6i@s28( 
z!GlATbP;&@9M?Yr-E;0F^`Xj$VJ8jxe1IKVVW;TT;MEw^n8O@(Kf}Dka7Nq2xWr7y zooWYa4<*Vo>BT1}+R_GPy7#RkcbeOs+CPu5hDatdW!*AoJQkc4vSwP4q~?(GpMU3b z>geIbCt*+rdcpQl_#`|(Z2qWusgM5i=ad@G8@|Om;r^F7gxZ*?Xbv(sy(V2Y|Dpjx z*MzU(_70!n{pttd*>t(}l5d?b- zS8Fyola>M@AsguTPv4tcExZO9$CT>~(OM+fLv^6=q3~?_JoFtGkQ{$N0H*?2W$%@i z{WD$#h2c=^l@I7_}GBSH~|mSj{m^!X$;~#Eh}K<`E^uKL z42i5iT(4edTrYaDpPhg61y?puMEn9O=4rFh;L(j(MSS_iCKCC`!@i=a9tPQb=Tix^ zKtIZ^3P(e?bCHZCBC^$QtIglz%9E=-9=o8u&RY%5U%%V=%Ki1DyS_Wqbej#>G6JHx z9~+wjWgC?>_fvV=G^KVLU%SjpLuf3VVjEH zA68e+IkY-isaV&P?=330Zm!!Ef6QgG14v=Msw1(nxwN6al>OD@87E3(&Lykd_LKBj_e%z<5jLcg2R|-2^g$>H_wNdYsu*1-h);ut>gJ6?x?G>AU9UeMB!wfiIZM&cl?dUIm)O66_D_LbEkW~KMKGxDZ&Mya@I=k3q+ zPnG;NDfaLXsow_f_O?q=O+oOBrL1hn-_S4|EZbsl!pH6rg9c&8(l$;x{3bkqMj9qHwJ(Y;1oAm z03pk@c{D96XhdZp3#AcFT5&OLP-)!9+$3T{<>*raf|_9fr~$BVB`ZGG8UAGXkm==5 zPe7`HMG-hlBibdZ$S!Lu@tz&o*~4q9LI0-+@C66ipRWa(x2_cu0AS+(+wt^F`1Sz+ zJS$f-OJ7R^eMKj@rpaP{_eg?oYi@;W%e z{d`q;c>W6gxBaV5Usve=DtRIQZR?Lg@ZTMz~fsJL(6$~2#ZN%xT&oLNW^Tk&4R z)%eNn0JXfaQBP%sfOU*`+;0tqO-uLu3j?Y*^Oa8hp^8o+RcvQY9njrV`A%d9a^!U3 zFl_UY9v>a?uS5q?)_U5+*E6^q7tKND7Z1x;5G`xjDncS65qbF_{-v;b)<$e3^Qdfk}n-N+-f1bV^lXm`H@%l%_2Z<(e_cos&AI4AoSRnSdYr+%4Yx4ZQTY#`|+uw!DEbt)h z;$zv7e1#~d&#A>#Ra93^d&TE!GM811V*{|sV6&|RX%l0P%&S9|@Brc}Pd4w16VH6r zFrZEvXVw+w5&tn^2lASsBY1r-5IPL4f8jS0K1hkdw%?0mc9;W0!3GbApmwI1)1-jK$ommDEL*)bm~3Ngb)47B z`{b`aMg2$)fnQ4g-nEO{<{|6m)x@vb-v4B=qn#@{D({d$%IVTNT2qWb+iYcgA=RZ3 z9nwLu$e5>Ql*(n3){;=t>xdpAk6yjj#^%J9s=ZwzcUJQgG16B_pD^W#UnO+S`6v@( zw5Bda_V}#4g80&%2uLN6@lsE{Slq~rH8<%;f-A>C?T>2&S1%A4aM4fy_?f4uwiB)G z8n--#g3Dn_rD&FS>@1#wVnyEI&1{WR2W3!He1|w^*|4yz!}R1w7=mEip_v&x@$YK$kgoWx#1@UX#))M0r*-jQA}fuqr)-&)8Fsh?oLN674E2kr)k zoj|;wEKJR#44eF3hg{q5x11d}u^6263g9GOOYN%7?aWNnQzh`Vcg!`xKUc<4uk5xf zSu)eC?!tF@e~SxU+%t9sk9Dl%p37Hrzs|=E#Xe(N}0v6g`5Nb1B zrwUh5i^a%Q|4^uI8O7?muw(CpF(R7Ar>EmmQ>l%E!#wFrC+JB-KTyiBg?d}K>W-9( zhk;}J?Mp06zOAaZU)TnRgw#O>nT%e(2*e%)dLy@pJ5ElR_XklT3?h-`zBRN!@Qy8l zJR*;+D~W41rg}7#3s-DFHzfZ^h<(TEQ-k|-EqqTk`g6CWp5Pb{invX{HR{)#1OqF> 
zY-_8Z{LRkcOr2BQop`b~yml|Je6!zDb7E8V&8<$35U$K4>x-~Y0&I6t*^TY71$l+) zC7omkYpbX)k!#Biqm-1CECPmw(m>#L5Gwl;2xLpHt`>9h_D)|pK|wgyZ4X@wOW6Qi z7g$pVCuwE3WL1`o+zaIq-#*CUx^1#_$~42)-W~~O~v8ph3NapbM9KU&V$k*FpbEJTj5FdYIu6fdt^>s>Gn$yko)x&eoP`EvKdwYBT zW8vw+VndU${X)E1uClXLhMkl}pT-D;b0~S`lPTEgG;=hH+E!{{@8TO$7e-OL*RfIT z`VdBjk9lfjeKuR1|m~L45V>Cm+S;G(05@Ot*MCJZwOaPt|v6X zbM!HMjKxVBzIhKyvAS=V$$d7GgC=|-yls&9_K%7EqYfjx^1UvH8y}=1J?;gv$qPL^ z82r-6=;KywdYSm2$%^_+eG>Qf{FQ*P@LlYzPK~a1cdxug?t#*sC6UB65sW5_h(2?} z1Q|DaM@A&EwcU%wOY!V1YEUvJ6Vc=rUQxHm*+OpQwrw1?Uy(JaEAEp&@XIeM32G2} z6->ifPYr!`yC;&SZ!EHU81`vNyV;xbLD9;XsXt=|W4;VtW7IxI#~`J%9VvO0ffyKa zBGYTO+DG(a50{ISy!ww(bztQNX3e?StwE`XEvwW#W%bxC-|Py(N+?)K$c9w(1 zXNJ#jx+CtascP5*5s0OAoM37#b0Uyl>|%PKfNewtU6bJ62QP>j<3=BecDwkn5raGA&+T6vHgMmxfpT5=R}x z#_F(2;6c4P}8yKzFgQctf+!-_lH=mu!Bha#U|5-WXxh%Tx?R?Yo3#K`6 dn%(XN*W=D}u~!wGum8UO^zJ>>s@8ZC`9EBT2eSYG literal 0 HcmV?d00001 diff --git a/docs/index.rst b/docs/index.rst index 97a5107fa0eea..e39f16cdf9126 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -25,8 +25,8 @@ Apache Airflow Documentation Airflow is a platform to programmatically author, schedule and monitor workflows. -Use airflow to author workflows as directed acyclic graphs (DAGs) of tasks. -The airflow scheduler executes your tasks on an array of workers while +Use Airflow to author workflows as Directed Acyclic Graphs (DAGs) of tasks. +The Airflow scheduler executes your tasks on an array of workers while following the specified dependencies. Rich command line utilities make performing complex surgeries on DAGs a snap. 
The rich user interface makes it easy to visualize pipelines running in production, From 3041f2fcc227f2f870ed91f73a37a7bfd140b434 Mon Sep 17 00:00:00 2001 From: Felix Date: Mon, 11 Feb 2019 12:31:45 +0100 Subject: [PATCH 0071/1104] [AIRFLOW-3749] Fix Edit Dag Run page when using RBAC (#4613) --- airflow/www/templates/airflow/dag.html | 2 +- airflow/www/views.py | 2 +- tests/www/test_views.py | 17 +++++++++++++---- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/airflow/www/templates/airflow/dag.html b/airflow/www/templates/airflow/dag.html index 06e4773516e25..bb7b92d25fcb2 100644 --- a/airflow/www/templates/airflow/dag.html +++ b/airflow/www/templates/airflow/dag.html @@ -521,7 +521,7 @@
- {{ html_code|safe }} {% endblock %} diff --git a/tests/www/test_views.py b/tests/www/test_views.py index 9e99ec0553069..d7111df1354d3 100644 --- a/tests/www/test_views.py +++ b/tests/www/test_views.py @@ -31,6 +31,7 @@ from urllib.parse import quote_plus import mock +import jinja2 from flask import url_for from flask._compat import PY2 from parameterized import parameterized @@ -57,6 +58,7 @@ def setUpClass(cls): conf.load_test_config() cls.app, cls.appbuilder = application.create_app(session=Session, testing=True) cls.app.config['WTF_CSRF_ENABLED'] = False + cls.app.jinja_env.undefined = jinja2.StrictUndefined settings.configure_orm() cls.session = Session From ae33e7cad45055d4b9dfb7ba70fe7fe7627fa815 Mon Sep 17 00:00:00 2001 From: Felix Date: Sun, 17 Feb 2019 07:12:28 +0100 Subject: [PATCH 0106/1104] [AIRFLOW-3807] Fix Graph View Highlighting of Tasks (#4653) - fixes up_for_retry and up_for_reschedule tasks when hovering over retry/rescheduled task state - adds missing task states to stateFocusMap that will be used for highlighting tasks when clicking on task state - removed invalid attributes for some tags - reformatted accordingly to rules from .editorconfig [AIRFLOW-3807] Fix no_status tasks highlighting in graph view [AIRFLOW-3807] Change "no status" string to "no_status" [AIRFLOW-3807] Fix syntax issue in js statement [AIRFLOW-3807] Correct tree view tasks' status labels - reformat tree.html file - remove invalid attributes from tree.html tags --- airflow/www/static/js/graph.js | 2 +- airflow/www/templates/airflow/graph.html | 118 ++++++++++++----------- airflow/www/templates/airflow/tree.html | 88 +++++++++-------- 3 files changed, 107 insertions(+), 101 deletions(-) diff --git a/airflow/www/static/js/graph.js b/airflow/www/static/js/graph.js index 689623beabf7c..98f4d460674c5 100644 --- a/airflow/www/static/js/graph.js +++ b/airflow/www/static/js/graph.js @@ -27,7 +27,7 @@ function update_nodes_states(task_instances) { return $(this).text() === task_id; 
}) .parent().parent().parent().parent() - .attr("class", "node enter " + ti.state) + .attr("class", "node enter " + (ti.state ? ti.state : "no_status")) .attr("data-toggle", "tooltip") .attr("data-original-title", function (d) { // Tooltip diff --git a/airflow/www/templates/airflow/graph.html b/airflow/www/templates/airflow/graph.html index c5213c870d075..ac5ece68b0512 100644 --- a/airflow/www/templates/airflow/graph.html +++ b/airflow/www/templates/airflow/graph.html @@ -21,82 +21,81 @@ {% block head_css %} {{ super() }} - + {% endblock %} {% block content %} {{ super() }} - {% if doc_md %} -
{{ doc_md|safe }}
- {% endif %} -
-
- {{ state_token }} - Base date: {{ form.base_date(class_="form-control") }} - Number of runs: {{ form.num_runs(class_="form-control") }} - Run: - {{ form.execution_date(class_="form-control") | safe }} - Layout: - {{ form.arrange(class_="form-control") | safe }} - - - - -
-
- -
-
-
+{% if doc_md %} +
{{ doc_md|safe }}
+{% endif %} +
+
+ {{ state_token }} + Base date: {{ form.base_date(class_="form-control") }} + Number of runs: {{ form.num_runs(class_="form-control") }} + Run: + {{ form.execution_date(class_="form-control") | safe }} + Layout: + {{ form.arrange(class_="form-control") | safe }} + + + + +
+
+ +
+
+

- {% for op in operators %} -
- {{ op.__name__ }} -
- {% endfor %} - -
-
no status
+ {% for op in operators %} +
+ {{ op.__name__ }} +
+ {% endfor %} + +
+
no_status
queued
-
retry
-
rescheduled
+
up_for_retry
+
up_for_reschedule
skipped
failed
running
success
-
-
+
+

- - - - - - - spinner + + + + + + + spinner

{% endblock %} {% block tail %} - {{ super() }} +{{ super() }} - - - + + - + + + + {% endblock %} diff --git a/airflow/www/templates/airflow/tree.html b/airflow/www/templates/airflow/tree.html index e0fb57f56ebf7..68b05af89672d 100644 --- a/airflow/www/templates/airflow/tree.html +++ b/airflow/www/templates/airflow/tree.html @@ -20,67 +20,64 @@ {% block head_css %} {{ super() }} + - + href="{{ url_for('appbuilder.static',filename='datepicker/bootstrap-datepicker.css') }}"> {% endblock %} {% block content %} {{ super() }}
-
- Base date: {{ form.base_date(class_="form-control") }} - Number of runs: {{ form.num_runs(class_="form-control") }} - - - - -
+
+ Base date: {{ form.base_date(class_="form-control") }} + Number of runs: {{ form.num_runs(class_="form-control") }} + + + + +

-
no status
-
-
queued
-
-
retry
-
-
rescheduled
-
-
skipped
-
-
failed
-
-
running
-
-
success
-
- {% for op in operators %} -
-
-
{{ op.__name__ }}
- {% endfor %} -
+
no_status
+
+
queued
+
+
up_for_retry
+
+
up_for_reschedule
+
+
skipped
+
+
failed
+
+
running
+
+
success
+
+ {% for op in operators %} +
+
+
{{ op.__name__ }}
+ {% endfor %} +

- - - - - - + + + + + +
{% endblock %} {% block tail %} - {{ super() }} - - + + + {% endblock %} From b71a4b6c634d7b7b0ffd97699c7039ad76ff900e Mon Sep 17 00:00:00 2001 From: Ryan Yuan Date: Mon, 18 Feb 2019 09:19:33 +1100 Subject: [PATCH 0107/1104] [AIRFLOW-3799] Add compose method to GoogleCloudStorageHook (#4641) --- airflow/contrib/hooks/gcs_hook.py | 48 ++++++++++++++++ tests/contrib/hooks/test_gcs_hook.py | 82 ++++++++++++++++++++++++++++ 2 files changed, 130 insertions(+) diff --git a/airflow/contrib/hooks/gcs_hook.py b/airflow/contrib/hooks/gcs_hook.py index 6158e7a35a826..f3ba289529195 100644 --- a/airflow/contrib/hooks/gcs_hook.py +++ b/airflow/contrib/hooks/gcs_hook.py @@ -655,6 +655,54 @@ def insert_object_acl(self, bucket, object_name, entity, role, generation, 'Object ACL entry creation failed. Error was: {}'.format(ex.content) ) + def compose(self, bucket, source_objects, destination_object, num_retries=5): + """ + Composes a list of existing object into a new object in the same storage bucket + + Currently it only supports up to 32 objects that can be concatenated + in a single operation + + https://cloud.google.com/storage/docs/json_api/v1/objects/compose + + :param bucket: The name of the bucket containing the source objects. + This is also the same bucket to store the composed destination object. + :type bucket: str + :param source_objects: The list of source objects that will be composed + into a single object. + :type source_objects: list + :param destination_object: The path of the object if given. 
+ :type destination_object: str + """ + + if not source_objects or not len(source_objects): + raise ValueError('source_objects cannot be empty.') + + if not bucket or not destination_object: + raise ValueError('bucket and destination_object cannot be empty.') + + service = self.get_conn() + + dict_source_objects = [{'name': source_object} + for source_object in source_objects] + body = { + 'sourceObjects': dict_source_objects + } + + try: + self.log.info("Composing %s to %s in the bucket %s", + source_objects, destination_object, bucket) + service \ + .objects() \ + .compose(destinationBucket=bucket, + destinationObject=destination_object, + body=body) \ + .execute(num_retries=num_retries) + return True + except HttpError as ex: + if ex.resp['status'] == '404': + return False + raise + def _parse_gcs_url(gsurl): """ diff --git a/tests/contrib/hooks/test_gcs_hook.py b/tests/contrib/hooks/test_gcs_hook.py index 0f1f056dbe9f9..abd63fb15c4ba 100644 --- a/tests/contrib/hooks/test_gcs_hook.py +++ b/tests/contrib/hooks/test_gcs_hook.py @@ -342,6 +342,88 @@ def test_delete_nonexisting_object(self, mock_service): self.assertFalse(response) + @mock.patch(GCS_STRING.format('GoogleCloudStorageHook.get_conn')) + def test_compose(self, mock_service): + test_bucket = 'test_bucket' + test_source_objects = ['test_object_1', 'test_object_2', 'test_object_3'] + test_destination_object = 'test_object_composed' + + method = (mock_service.return_value.objects.return_value.compose) + + self.gcs_hook.compose( + bucket=test_bucket, + source_objects=test_source_objects, + destination_object=test_destination_object + ) + + body = { + 'sourceObjects': [ + {'name': 'test_object_1'}, + {'name': 'test_object_2'}, + {'name': 'test_object_3'} + ] + } + + method.assert_called_once_with( + destinationBucket=test_bucket, + destinationObject=test_destination_object, + body=body + ) + + @mock.patch(GCS_STRING.format('GoogleCloudStorageHook.get_conn')) + def test_compose_with_empty_source_objects(self, 
mock_service): + test_bucket = 'test_bucket' + test_source_objects = [] + test_destination_object = 'test_object_composed' + + with self.assertRaises(ValueError) as e: + self.gcs_hook.compose( + bucket=test_bucket, + source_objects=test_source_objects, + destination_object=test_destination_object + ) + + self.assertEqual( + str(e.exception), + 'source_objects cannot be empty.' + ) + + @mock.patch(GCS_STRING.format('GoogleCloudStorageHook.get_conn')) + def test_compose_without_bucket(self, mock_service): + test_bucket = None + test_source_objects = ['test_object_1', 'test_object_2', 'test_object_3'] + test_destination_object = 'test_object_composed' + + with self.assertRaises(ValueError) as e: + self.gcs_hook.compose( + bucket=test_bucket, + source_objects=test_source_objects, + destination_object=test_destination_object + ) + + self.assertEqual( + str(e.exception), + 'bucket and destination_object cannot be empty.' + ) + + @mock.patch(GCS_STRING.format('GoogleCloudStorageHook.get_conn')) + def test_compose_without_destination_object(self, mock_service): + test_bucket = 'test_bucket' + test_source_objects = ['test_object_1', 'test_object_2', 'test_object_3'] + test_destination_object = None + + with self.assertRaises(ValueError) as e: + self.gcs_hook.compose( + bucket=test_bucket, + source_objects=test_source_objects, + destination_object=test_destination_object + ) + + self.assertEqual( + str(e.exception), + 'bucket and destination_object cannot be empty.' 
+ ) + class TestGoogleCloudStorageHookUpload(unittest.TestCase): def setUp(self): From b39a6795691f10a1eba9432971feed9893dd5c4e Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Sun, 17 Feb 2019 21:06:27 -0800 Subject: [PATCH 0108/1104] [AIRFLOW-3885] ~20x speed-up of slowest unit test (#4726) The test `SchedulerJobTest.test_scheduler_start_date` is the slowest test, taking ~5 minutes on average: >>> [success] 12.99% tests.test_jobs.SchedulerJobTest.test_scheduler_start_date: 295.1935s [success] 6.79% tests.test_jobs.SchedulerJobTest.test_scheduler_multiprocessing: 154.2304s [success] 6.72% tests.test_jobs.SchedulerJobTest.test_scheduler_task_start_date: 152.7215s [success] 4.34% tests.test_jobs.SchedulerJobTest.test_new_import_error_replaces_old: 98.7339s [success] 3.63% tests.test_jobs.SchedulerJobTest.test_remove_error_clears_import_error: 82.4062s After setting the subdirectory and eliminating (I think) redundant scheduler loops, the test time comes down to ~15 seconds. --- tests/test_jobs.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 84acfbb6c0f15..42046d2fbb5f1 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -2447,7 +2447,8 @@ def test_scheduler_start_date(self): self.assertTrue(dag.start_date > datetime.datetime.utcnow()) scheduler = SchedulerJob(dag_id, - num_runs=2) + subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'), + num_runs=1) scheduler.run() # zero tasks ran @@ -2471,7 +2472,8 @@ def test_scheduler_start_date(self): session.commit() scheduler = SchedulerJob(dag_id, - num_runs=2) + subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'), + num_runs=1) scheduler.run() # still one task From 725bc2e748c630acc7de991d682632bd61e4dbe2 Mon Sep 17 00:00:00 2001 From: zhongjiajie Date: Mon, 18 Feb 2019 16:21:50 +0800 Subject: [PATCH 0109/1104] [AIRFLOW-3733] Don't raise NameError in HQL hook to_csv when no rows returned (#4560) --- 
airflow/hooks/hive_hooks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py index 2efeb3408a9c9..b701ebb0d1989 100644 --- a/airflow/hooks/hive_hooks.py +++ b/airflow/hooks/hive_hooks.py @@ -897,6 +897,7 @@ def to_csv( header = next(results_iter) message = None + i = 0 with open(csv_filepath, 'wb') as f: writer = csv.writer(f, delimiter=delimiter, From 480eeff027e949a21acbf1a7bc201e272be2d94b Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Mon, 18 Feb 2019 17:13:45 -0800 Subject: [PATCH 0110/1104] [AIRFLOW-3885] ~2.5x speed-up for backfill tests (#4731) The BackfillJobTest suite now takes 57 seconds vs. the baseline of 147 seconds on my laptop. A couple of optimizations: - Don't sleep() if we are running unit tests - Don't backfill more DagRuns than needed (reduced from 5 to 2, since we only need 2 DagRuns to verify that we can run backwards) I've also made a few tests reentrant by clearing out the Pool, DagRun, and TaskInstance table between runs. 
--- airflow/jobs.py | 20 +++++++++++--------- tests/test_jobs.py | 7 ++++++- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/airflow/jobs.py b/airflow/jobs.py index 39e3462faae66..00d4864b1d3a9 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -168,15 +168,17 @@ def heartbeat(self): if job.state == State.SHUTDOWN: self.kill() - # Figure out how long to sleep for - sleep_for = 0 - if job.latest_heartbeat: - sleep_for = max( - 0, - self.heartrate - (timezone.utcnow() - - job.latest_heartbeat).total_seconds()) - - sleep(sleep_for) + is_unit_test = conf.getboolean('core', 'unit_test_mode') + if not is_unit_test: + # Figure out how long to sleep for + sleep_for = 0 + if job.latest_heartbeat: + seconds_remaining = self.heartrate - \ + (timezone.utcnow() - job.latest_heartbeat)\ + .total_seconds() + sleep_for = max(0, seconds_remaining) + + sleep(sleep_for) # Update last heartbeat time with create_session() as session: diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 42046d2fbb5f1..e16c9d9a0a7f8 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -133,6 +133,11 @@ def abort(): class BackfillJobTest(unittest.TestCase): def setUp(self): + with create_session() as session: + session.query(models.DagRun).delete() + session.query(models.Pool).delete() + session.query(models.TaskInstance).delete() + self.parser = cli.CLIFactory.get_parser() self.dagbag = DagBag(include_examples=True) @@ -1207,7 +1212,7 @@ def test_backfill_run_backwards(self): job = BackfillJob( dag=dag, start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=5), + end_date=DEFAULT_DATE + datetime.timedelta(days=1), run_backwards=True ) job.run() From c4b9d885f47b831f177532ab41a03cf5a8d73253 Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Mon, 18 Feb 2019 17:23:28 -0800 Subject: [PATCH 0111/1104] [AIRFLOW-3885] ~10x speed-up of SchedulerJobTest suite (#4730) The SchedulerJobTest suite now takes ~90 seconds on my laptop (down from ~900 seconds == 15 
minutes) on Jenkins. There are a few optimizations here: 1. Don't sleep() for 1 second every scheduling loop (in unit tests) 2. Don't process the example DAGs 3. Use `subdir` to process only the DAGs we need, for a couple of tests that actually run the scheduler 4. Only load the DagBag once instead of before each test I've also added a few tables to the list of tables that are cleaned up in between test runs to make the tests re-entrant. --- airflow/jobs.py | 9 ++++++--- airflow/utils/dag_processing.py | 4 +++- tests/test_jobs.py | 27 +++++++++++++++++++++++++-- 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/airflow/jobs.py b/airflow/jobs.py index 00d4864b1d3a9..7e14d1514d501 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -1622,13 +1622,16 @@ def _execute_helper(self): self.heartbeat() last_self_heartbeat_time = timezone.utcnow() + is_unit_test = conf.getboolean('core', 'unit_test_mode') loop_end_time = time.time() loop_duration = loop_end_time - loop_start_time self.log.debug( "Ran scheduling loop in %.2f seconds", loop_duration) - self.log.debug("Sleeping for %.2f seconds", self._processor_poll_interval) - time.sleep(self._processor_poll_interval) + + if not is_unit_test: + self.log.debug("Sleeping for %.2f seconds", self._processor_poll_interval) + time.sleep(self._processor_poll_interval) # Exit early for a test mode, run one additional scheduler loop # to reduce the possibility that parsed DAG was put into the queue @@ -1641,7 +1644,7 @@ def _execute_helper(self): " have been processed {} times".format(self.num_runs)) break - if loop_duration < 1: + if loop_duration < 1 and not is_unit_test: sleep_length = 1 - loop_duration self.log.debug( "Sleeping for {0:.2f} seconds to prevent excessive logging" diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index 5cc99b7c57f53..1473c016c7288 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -277,7 +277,7 @@ def get_dag(self, dag_id): 
def list_py_file_paths(directory, safe_mode=True, - include_examples=conf.getboolean('core', 'LOAD_EXAMPLES')): + include_examples=None): """ Traverse a directory and look for Python files. @@ -288,6 +288,8 @@ def list_py_file_paths(directory, safe_mode=True, :return: a list of paths to Python files in the specified directory :rtype: list[unicode] """ + if include_examples is None: + include_examples = conf.getboolean('core', 'LOAD_EXAMPLES') file_paths = [] if directory is None: return [] diff --git a/tests/test_jobs.py b/tests/test_jobs.py index e16c9d9a0a7f8..bd0fcf34a8446 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1392,12 +1392,33 @@ def test_localtaskjob_double_trigger(self): class SchedulerJobTest(unittest.TestCase): def setUp(self): - self.dagbag = DagBag() with create_session() as session: session.query(models.DagRun).delete() + session.query(models.TaskInstance).delete() + session.query(models.Pool).delete() + session.query(models.DagModel).delete() + session.query(SlaMiss).delete() session.query(errors.ImportError).delete() session.commit() + @classmethod + def setUpClass(cls): + cls.dagbag = DagBag() + + def getboolean(section, key): + if section.lower() == 'core' and key.lower() == 'load_examples': + return False + else: + return configuration.conf.getboolean(section, key) + + cls.patcher = mock.patch('airflow.jobs.conf.getboolean') + mock_getboolean = cls.patcher.start() + mock_getboolean.side_effect = getboolean + + @classmethod + def tearDownClass(cls): + cls.patcher.stop() + @staticmethod def run_single_scheduler_loop_with_no_dags(dags_folder): """ @@ -2495,6 +2516,7 @@ def test_scheduler_task_start_date(self): dag = self.dagbag.get_dag(dag_id) dag.clear() scheduler = SchedulerJob(dag_id, + subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'), num_runs=2) scheduler.run() @@ -2517,7 +2539,8 @@ def test_scheduler_multiprocessing(self): dag.clear() scheduler = SchedulerJob(dag_ids=dag_ids, - num_runs=2) + 
subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'), + num_runs=1) scheduler.run() # zero tasks ran From 6a7e0b7e4c006ab135b42cd9870983d4f2bcafd4 Mon Sep 17 00:00:00 2001 From: Nathaniel Ritholtz Date: Mon, 18 Feb 2019 20:26:46 -0500 Subject: [PATCH 0112/1104] [AIRFLOW-3911] Change Harvesting DAG parsing results to DEBUG log level (#4729) --- airflow/jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/jobs.py b/airflow/jobs.py index 7e14d1514d501..0253e0eb341df 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -1569,7 +1569,7 @@ def _execute_helper(self): "Waiting for processors to finish since we're using sqlite") self.processor_agent.wait_until_finished() - self.log.info("Harvesting DAG parsing results") + self.log.debug("Harvesting DAG parsing results") simple_dags = self.processor_agent.harvest_simple_dags() self.log.debug("Harvested {} SimpleDAGs".format(len(simple_dags))) From f722e48104d5346b0fb34398e9ae93a1ebcb2d52 Mon Sep 17 00:00:00 2001 From: dstandish Date: Mon, 18 Feb 2019 17:33:37 -0800 Subject: [PATCH 0113/1104] [AIRFLOW-3901] add role as optional config parameter for SnowflakeHook (#4721) --- airflow/contrib/hooks/snowflake_hook.py | 10 ++++++---- tests/contrib/hooks/test_snowflake_hook.py | 8 +++++--- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/airflow/contrib/hooks/snowflake_hook.py b/airflow/contrib/hooks/snowflake_hook.py index 3b785328703c2..4e5fb4ca76f76 100644 --- a/airflow/contrib/hooks/snowflake_hook.py +++ b/airflow/contrib/hooks/snowflake_hook.py @@ -40,6 +40,7 @@ def __init__(self, *args, **kwargs): self.warehouse = kwargs.pop("warehouse", None) self.database = kwargs.pop("database", None) self.region = kwargs.pop("region", None) + self.role = kwargs.pop("role", None) def _get_conn_params(self): """ @@ -51,6 +52,7 @@ def _get_conn_params(self): warehouse = conn.extra_dejson.get('warehouse', None) database = conn.extra_dejson.get('database', None) region = 
conn.extra_dejson.get("region", None) + role = conn.extra_dejson.get('role', None) conn_config = { "user": conn.login, @@ -59,7 +61,8 @@ def _get_conn_params(self): "database": self.database or database or '', "account": self.account or account or '', "warehouse": self.warehouse or warehouse or '', - "region": self.region or region or '' + "region": self.region or region or '', + "role": self.role or role or '', } return conn_config @@ -69,9 +72,8 @@ def get_uri(self): """ conn_config = self._get_conn_params() uri = 'snowflake://{user}:{password}@{account}/{database}/' - uri += '{schema}?warehouse={warehouse}' - return uri.format( - **conn_config) + uri += '{schema}?warehouse={warehouse}&role={role}' + return uri.format(**conn_config) def get_conn(self): """ diff --git a/tests/contrib/hooks/test_snowflake_hook.py b/tests/contrib/hooks/test_snowflake_hook.py index f660cac5d54f4..80f2db8612bdb 100644 --- a/tests/contrib/hooks/test_snowflake_hook.py +++ b/tests/contrib/hooks/test_snowflake_hook.py @@ -40,7 +40,8 @@ def setUp(self): self.conn.extra_dejson = {'database': 'db', 'account': 'airflow', 'warehouse': 'af_wh', - 'region': 'af_region'} + 'region': 'af_region', + 'role': 'af_role'} class UnitTestSnowflakeHook(SnowflakeHook): conn_name_attr = 'snowflake_conn_id' @@ -54,7 +55,7 @@ def get_connection(self, connection_id): self.db_hook = UnitTestSnowflakeHook() def test_get_uri(self): - uri_shouldbe = 'snowflake://user:pw@airflow/db/public?warehouse=af_wh' + uri_shouldbe = 'snowflake://user:pw@airflow/db/public?warehouse=af_wh&role=af_role' self.assertEqual(uri_shouldbe, self.db_hook.get_uri()) def test_get_conn_params(self): @@ -64,7 +65,8 @@ def test_get_conn_params(self): 'database': 'db', 'account': 'airflow', 'warehouse': 'af_wh', - 'region': 'af_region'} + 'region': 'af_region', + 'role': 'af_role'} self.assertEqual(conn_params_shouldbe, self.db_hook._get_conn_params()) def test_get_conn(self): From c48e83e43ffbd359640560b1503e6368ec876763 Mon Sep 17 00:00:00 
2001 From: Joshua Carp Date: Mon, 18 Feb 2019 20:54:39 -0500 Subject: [PATCH 0114/1104] [AIRFLOW-3907] Upgrade flask and set cookie security flags. (#4725) --- airflow/config_templates/default_airflow.cfg | 6 ++++++ airflow/www/app.py | 4 ++++ setup.py | 2 +- 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg index c694df2512cfa..fff32a6694942 100644 --- a/airflow/config_templates/default_airflow.cfg +++ b/airflow/config_templates/default_airflow.cfg @@ -319,6 +319,12 @@ default_dag_run_display_number = 25 # Enable werkzeug `ProxyFix` middleware enable_proxy_fix = False +# Set secure flag on session cookie +cookie_secure = False + +# Set samesite policy on session cookie +cookie_samesite = + [email] email_backend = airflow.utils.email.send_email_smtp diff --git a/airflow/www/app.py b/airflow/www/app.py index 58f4d1e1a3abe..fde99743c58fd 100644 --- a/airflow/www/app.py +++ b/airflow/www/app.py @@ -55,6 +55,10 @@ def create_app(config=None, session=None, testing=False, app_name="Airflow"): app.config['TESTING'] = testing app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False + app.config['SESSION_COOKIE_HTTPONLY'] = True + app.config['SESSION_COOKIE_SECURE'] = conf.getboolean('webserver', 'COOKIE_SECURE') + app.config['SESSION_COOKIE_SAMESITE'] = conf.get('webserver', 'COOKIE_SAMESITE') + csrf.init_app(app) db = SQLA(app) diff --git a/setup.py b/setup.py index 557c7b5652f2b..b78d333178aa8 100644 --- a/setup.py +++ b/setup.py @@ -292,7 +292,7 @@ def do_setup(): 'croniter>=0.3.17, <0.4', 'dill>=0.2.2, <0.3', 'enum34~=1.1.6;python_version<"3.4"', - 'flask>=0.12.4, <0.13', + 'flask>=1.0, <2.0', 'flask-appbuilder==1.12.3', 'flask-admin==1.5.2', 'flask-caching>=1.3.3, <1.4.0', From d80e945f40fb6aaa95c308e6fb18797cad35e5f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Tue, 19 Feb 2019 16:25:38 +0100 Subject: [PATCH 0115/1104] 
[AIRFLOW-3616][AIRFLOW-1215] Add aliases for schema with underscore (#4523) --- airflow/models/connection.py | 24 +++++++++++---------- docs/concepts.rst | 8 +++---- docs/howto/manage-connections.rst | 35 +++++++++++++++++++++++++++---- tests/models.py | 17 +++++++++++++++ 4 files changed, 65 insertions(+), 19 deletions(-) diff --git a/airflow/models/connection.py b/airflow/models/connection.py index 913ee5db612b5..5237a05b6d32d 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -110,22 +110,24 @@ def __init__( self.extra = extra def parse_from_uri(self, uri): - temp_uri = urlparse(uri) - hostname = temp_uri.hostname or '' - conn_type = temp_uri.scheme + uri_parts = urlparse(uri) + hostname = uri_parts.hostname or '' + conn_type = uri_parts.scheme if conn_type == 'postgresql': conn_type = 'postgres' + elif '-' in conn_type: + conn_type = conn_type.replace('-', '_') self.conn_type = conn_type self.host = unquote(hostname) if hostname else hostname - quoted_schema = temp_uri.path[1:] + quoted_schema = uri_parts.path[1:] self.schema = unquote(quoted_schema) if quoted_schema else quoted_schema - self.login = unquote(temp_uri.username) \ - if temp_uri.username else temp_uri.username - self.password = unquote(temp_uri.password) \ - if temp_uri.password else temp_uri.password - self.port = temp_uri.port - if temp_uri.query: - self.extra = json.dumps(dict(parse_qsl(temp_uri.query))) + self.login = unquote(uri_parts.username) \ + if uri_parts.username else uri_parts.username + self.password = unquote(uri_parts.password) \ + if uri_parts.password else uri_parts.password + self.port = uri_parts.port + if uri_parts.query: + self.extra = json.dumps(dict(parse_qsl(uri_parts.query))) def get_password(self): if self._password and self.is_encrypted: diff --git a/docs/concepts.rst b/docs/concepts.rst index 1eced2827b8e5..9ca1978217739 100644 --- a/docs/concepts.rst +++ b/docs/concepts.rst @@ -402,13 +402,13 @@ for some basic load balancing and fault 
tolerance when used in conjunction with retries. Airflow also has the ability to reference connections via environment -variables from the operating system. But it only supports URI format. If you -need to specify ``extra`` for your connection, please use web UI. +variables from the operating system. Then connection parameters must +be saved in URI format. If connections with the same ``conn_id`` are defined in both Airflow metadata database and environment variables, only the one in environment variables -will be referenced by Airflow (for example, given ``conn_id`` ``postgres_master``, -Airflow will search for ``AIRFLOW_CONN_POSTGRES_MASTER`` +will be referenced by Airflow (for example, given ``conn_id`` +``postgres_master``, Airflow will search for ``AIRFLOW_CONN_POSTGRES_MASTER`` in environment variables first and directly reference it if found, before it starts to search in metadata database). diff --git a/docs/howto/manage-connections.rst b/docs/howto/manage-connections.rst index 25bd1492ef3b4..3f5433ac81a77 100644 --- a/docs/howto/manage-connections.rst +++ b/docs/howto/manage-connections.rst @@ -70,10 +70,15 @@ When referencing the connection in the Airflow pipeline, the ``conn_id`` should be the name of the variable without the prefix. For example, if the ``conn_id`` is named ``postgres_master`` the environment variable should be named ``AIRFLOW_CONN_POSTGRES_MASTER`` (note that the environment variable -must be all uppercase). Airflow assumes the value returned from the -environment variable to be in a URI format (e.g. -``postgres://user:password@localhost:5432/master`` or -``s3://accesskey:secretkey@S3``). +must be all uppercase). + +Airflow assumes the value returned from the environment variable to be in a URI +format (e.g.``postgres://user:password@localhost:5432/master`` or +``s3://accesskey:secretkey@S3``). The underscore character is not allowed +in the scheme part of URI, so it must be changed to a hyphen character +(e.g. 
`google-compute-platform` if `conn_type` is `google_compute_platform`). +Query parameters are parsed to one-dimensional dict and then used to fill extra. + .. _manage-connections-connection-types: @@ -159,6 +164,28 @@ Scopes (comma separated) issue `AIRFLOW-2522 `_. + When specifying the connection in environment variable you should specify + it using URI syntax, with the following requirements: + + * scheme part should be equals ``google-cloud-platform`` (Note: look for a + hyphen character) + * authority (username, password, host, port), path is ignored + * query parameters contains information specific to this type of + connection. The following keys are accepted: + + * ``extra__google_cloud_platform__project`` - Project Id + * ``extra__google_cloud_platform__key_path`` - Keyfile Path + * ``extra__google_cloud_platform__key_dict`` - Keyfile JSON + * ``extra__google_cloud_platform__scope`` - Scopes + + Note that all components of the URI should be URL-encoded. + + For example: + + .. code-block:: bash + + google-cloud-platform://?extra__google_cloud_platform__key_path=%2Fkeys%2Fkey.json&extra__google_cloud_platform__scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform&extra__google_cloud_platform__project=airflow + Amazon Web Services ~~~~~~~~~~~~~~~~~~~ diff --git a/tests/models.py b/tests/models.py index 67cf8c8195f06..364a9e78dbd70 100644 --- a/tests/models.py +++ b/tests/models.py @@ -3398,3 +3398,20 @@ def test_connection_from_uri_no_schema(self): self.assertEqual(connection.login, 'user') self.assertEqual(connection.password, 'password with space') self.assertEqual(connection.port, 1234) + + def test_connection_from_uri_with_underscore(self): + uri = 'google-cloud-platform://?extra__google_cloud_platform__key_' \ + 'path=%2Fkeys%2Fkey.json&extra__google_cloud_platform__scope=' \ + 'https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform&extra' \ + '__google_cloud_platform__project=airflow' + connection = Connection(uri=uri) + 
self.assertEqual(connection.conn_type, 'google_cloud_platform') + self.assertEqual(connection.host, '') + self.assertEqual(connection.schema, '') + self.assertEqual(connection.login, None) + self.assertEqual(connection.password, None) + self.assertEqual(connection.extra_dejson, dict( + extra__google_cloud_platform__key_path='/keys/key.json', + extra__google_cloud_platform__project='airflow', + extra__google_cloud_platform__scope='https://www.googleapis.com/' + 'auth/cloud-platform')) From daec2ff7db64e979c2d8f15787c0ccfdb433647e Mon Sep 17 00:00:00 2001 From: Xiaodong Date: Tue, 19 Feb 2019 23:42:26 +0800 Subject: [PATCH 0116/1104] [AIRFLOW-3910] Raise exception explicitly in Connection.get_hook() (#4728) Passing exception silently here makes debugging/troubleshooting very hard --- airflow/models/connection.py | 129 +++++++++++++++++------------------ 1 file changed, 63 insertions(+), 66 deletions(-) diff --git a/airflow/models/connection.py b/airflow/models/connection.py index 5237a05b6d32d..212116cb35548 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -184,72 +184,69 @@ def rotate_fernet_key(self): self._extra = fernet.rotate(self._extra.encode('utf-8')).decode() def get_hook(self): - try: - if self.conn_type == 'mysql': - from airflow.hooks.mysql_hook import MySqlHook - return MySqlHook(mysql_conn_id=self.conn_id) - elif self.conn_type == 'google_cloud_platform': - from airflow.contrib.hooks.bigquery_hook import BigQueryHook - return BigQueryHook(bigquery_conn_id=self.conn_id) - elif self.conn_type == 'postgres': - from airflow.hooks.postgres_hook import PostgresHook - return PostgresHook(postgres_conn_id=self.conn_id) - elif self.conn_type == 'hive_cli': - from airflow.hooks.hive_hooks import HiveCliHook - return HiveCliHook(hive_cli_conn_id=self.conn_id) - elif self.conn_type == 'presto': - from airflow.hooks.presto_hook import PrestoHook - return PrestoHook(presto_conn_id=self.conn_id) - elif self.conn_type == 'hiveserver2': - 
from airflow.hooks.hive_hooks import HiveServer2Hook - return HiveServer2Hook(hiveserver2_conn_id=self.conn_id) - elif self.conn_type == 'sqlite': - from airflow.hooks.sqlite_hook import SqliteHook - return SqliteHook(sqlite_conn_id=self.conn_id) - elif self.conn_type == 'jdbc': - from airflow.hooks.jdbc_hook import JdbcHook - return JdbcHook(jdbc_conn_id=self.conn_id) - elif self.conn_type == 'mssql': - from airflow.hooks.mssql_hook import MsSqlHook - return MsSqlHook(mssql_conn_id=self.conn_id) - elif self.conn_type == 'oracle': - from airflow.hooks.oracle_hook import OracleHook - return OracleHook(oracle_conn_id=self.conn_id) - elif self.conn_type == 'vertica': - from airflow.contrib.hooks.vertica_hook import VerticaHook - return VerticaHook(vertica_conn_id=self.conn_id) - elif self.conn_type == 'cloudant': - from airflow.contrib.hooks.cloudant_hook import CloudantHook - return CloudantHook(cloudant_conn_id=self.conn_id) - elif self.conn_type == 'jira': - from airflow.contrib.hooks.jira_hook import JiraHook - return JiraHook(jira_conn_id=self.conn_id) - elif self.conn_type == 'redis': - from airflow.contrib.hooks.redis_hook import RedisHook - return RedisHook(redis_conn_id=self.conn_id) - elif self.conn_type == 'wasb': - from airflow.contrib.hooks.wasb_hook import WasbHook - return WasbHook(wasb_conn_id=self.conn_id) - elif self.conn_type == 'docker': - from airflow.hooks.docker_hook import DockerHook - return DockerHook(docker_conn_id=self.conn_id) - elif self.conn_type == 'azure_data_lake': - from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook - return AzureDataLakeHook(azure_data_lake_conn_id=self.conn_id) - elif self.conn_type == 'azure_cosmos': - from airflow.contrib.hooks.azure_cosmos_hook import AzureCosmosDBHook - return AzureCosmosDBHook(azure_cosmos_conn_id=self.conn_id) - elif self.conn_type == 'cassandra': - from airflow.contrib.hooks.cassandra_hook import CassandraHook - return CassandraHook(cassandra_conn_id=self.conn_id) - 
elif self.conn_type == 'mongo': - from airflow.contrib.hooks.mongo_hook import MongoHook - return MongoHook(conn_id=self.conn_id) - elif self.conn_type == 'gcpcloudsql': - from airflow.contrib.hooks.gcp_sql_hook import CloudSqlDatabaseHook - return CloudSqlDatabaseHook(gcp_cloudsql_conn_id=self.conn_id) - except Exception: - pass + if self.conn_type == 'mysql': + from airflow.hooks.mysql_hook import MySqlHook + return MySqlHook(mysql_conn_id=self.conn_id) + elif self.conn_type == 'google_cloud_platform': + from airflow.contrib.hooks.bigquery_hook import BigQueryHook + return BigQueryHook(bigquery_conn_id=self.conn_id) + elif self.conn_type == 'postgres': + from airflow.hooks.postgres_hook import PostgresHook + return PostgresHook(postgres_conn_id=self.conn_id) + elif self.conn_type == 'hive_cli': + from airflow.hooks.hive_hooks import HiveCliHook + return HiveCliHook(hive_cli_conn_id=self.conn_id) + elif self.conn_type == 'presto': + from airflow.hooks.presto_hook import PrestoHook + return PrestoHook(presto_conn_id=self.conn_id) + elif self.conn_type == 'hiveserver2': + from airflow.hooks.hive_hooks import HiveServer2Hook + return HiveServer2Hook(hiveserver2_conn_id=self.conn_id) + elif self.conn_type == 'sqlite': + from airflow.hooks.sqlite_hook import SqliteHook + return SqliteHook(sqlite_conn_id=self.conn_id) + elif self.conn_type == 'jdbc': + from airflow.hooks.jdbc_hook import JdbcHook + return JdbcHook(jdbc_conn_id=self.conn_id) + elif self.conn_type == 'mssql': + from airflow.hooks.mssql_hook import MsSqlHook + return MsSqlHook(mssql_conn_id=self.conn_id) + elif self.conn_type == 'oracle': + from airflow.hooks.oracle_hook import OracleHook + return OracleHook(oracle_conn_id=self.conn_id) + elif self.conn_type == 'vertica': + from airflow.contrib.hooks.vertica_hook import VerticaHook + return VerticaHook(vertica_conn_id=self.conn_id) + elif self.conn_type == 'cloudant': + from airflow.contrib.hooks.cloudant_hook import CloudantHook + return 
CloudantHook(cloudant_conn_id=self.conn_id) + elif self.conn_type == 'jira': + from airflow.contrib.hooks.jira_hook import JiraHook + return JiraHook(jira_conn_id=self.conn_id) + elif self.conn_type == 'redis': + from airflow.contrib.hooks.redis_hook import RedisHook + return RedisHook(redis_conn_id=self.conn_id) + elif self.conn_type == 'wasb': + from airflow.contrib.hooks.wasb_hook import WasbHook + return WasbHook(wasb_conn_id=self.conn_id) + elif self.conn_type == 'docker': + from airflow.hooks.docker_hook import DockerHook + return DockerHook(docker_conn_id=self.conn_id) + elif self.conn_type == 'azure_data_lake': + from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook + return AzureDataLakeHook(azure_data_lake_conn_id=self.conn_id) + elif self.conn_type == 'azure_cosmos': + from airflow.contrib.hooks.azure_cosmos_hook import AzureCosmosDBHook + return AzureCosmosDBHook(azure_cosmos_conn_id=self.conn_id) + elif self.conn_type == 'cassandra': + from airflow.contrib.hooks.cassandra_hook import CassandraHook + return CassandraHook(cassandra_conn_id=self.conn_id) + elif self.conn_type == 'mongo': + from airflow.contrib.hooks.mongo_hook import MongoHook + return MongoHook(conn_id=self.conn_id) + elif self.conn_type == 'gcpcloudsql': + from airflow.contrib.hooks.gcp_sql_hook import CloudSqlDatabaseHook + return CloudSqlDatabaseHook(gcp_cloudsql_conn_id=self.conn_id) def __repr__(self): return self.conn_id From 6bfa0bab2c68bdc43449b57d0adf9d5fb3c78471 Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Tue, 19 Feb 2019 10:16:33 -0800 Subject: [PATCH 0117/1104] [AIRFLOW-3885] Fix race condition in scheduler test (#4737) We're hitting this race condition frequently now that we don't sleep() during unit tests. We don't actually need to assert that the task is currently running - it's fine if it has already run successfully. 
--- tests/test_jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index bd0fcf34a8446..3ead508664661 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -3249,7 +3249,7 @@ def run_with_error(task): executor.do_update = True do_schedule() ti.refresh_from_db() - self.assertEqual(ti.state, State.RUNNING) + self.assertIn(ti.state, [State.RUNNING, State.SUCCESS]) @unittest.skipUnless("INTEGRATION" in os.environ, "Can only run end to end") def test_retry_handling_job(self): From de75b7a2bd7f5bef6a1d09942e0b43c17a3fbb95 Mon Sep 17 00:00:00 2001 From: David Smith Date: Tue, 19 Feb 2019 21:10:05 -0800 Subject: [PATCH 0118/1104] [AIRFLOW-3923] Update flask-admin dependency to 1.5.3 to resolve security vulnerabilities from safety (#4739) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b78d333178aa8..d090d0b9a9f5a 100644 --- a/setup.py +++ b/setup.py @@ -294,7 +294,7 @@ def do_setup(): 'enum34~=1.1.6;python_version<"3.4"', 'flask>=1.0, <2.0', 'flask-appbuilder==1.12.3', - 'flask-admin==1.5.2', + 'flask-admin==1.5.3', 'flask-caching>=1.3.3, <1.4.0', 'flask-login>=0.3, <0.5', 'flask-swagger==0.2.13', From c7d6c1ad89c91c9b3ca1feef935ecec603989a39 Mon Sep 17 00:00:00 2001 From: Xiaodong Date: Thu, 21 Feb 2019 08:59:44 +0800 Subject: [PATCH 0119/1104] [AIRFLOW-3905] Allow using "parameters" in SqlSensor (#4723) * [AIRFLOW-3905] Allow 'parameters' in SqlSensor * Add check on conn_type & add test Not all SQL-related connections are supported by SqlSensor, due to limitation in Connection model and hook implementation. 
--- airflow/sensors/sql_sensor.py | 22 ++++++++++++++---- tests/sensors/test_sql_sensor.py | 39 ++++++++++++++++++++++++++++---- 2 files changed, 52 insertions(+), 9 deletions(-) diff --git a/airflow/sensors/sql_sensor.py b/airflow/sensors/sql_sensor.py index c304ff594cfc7..6f942c4f3b8b6 100644 --- a/airflow/sensors/sql_sensor.py +++ b/airflow/sensors/sql_sensor.py @@ -19,6 +19,7 @@ from builtins import str +from airflow.exceptions import AirflowException from airflow.hooks.base_hook import BaseHook from airflow.sensors.base_sensor_operator import BaseSensorOperator from airflow.utils.decorators import apply_defaults @@ -34,22 +35,33 @@ class SqlSensor(BaseSensorOperator): :param sql: The sql to run. To pass, it needs to return at least one cell that contains a non-zero / empty string value. :type sql: str + :param parameters: The parameters to render the SQL query with (optional). + :type parameters: mapping or iterable """ template_fields = ('sql',) template_ext = ('.hql', '.sql',) ui_color = '#7c7287' @apply_defaults - def __init__(self, conn_id, sql, *args, **kwargs): - self.sql = sql + def __init__(self, conn_id, sql, parameters=None, *args, **kwargs): self.conn_id = conn_id + self.sql = sql + self.parameters = parameters super(SqlSensor, self).__init__(*args, **kwargs) def poke(self, context): - hook = BaseHook.get_connection(self.conn_id).get_hook() + conn = BaseHook.get_connection(self.conn_id) + + allowed_conn_type = {'google_cloud_platform', 'jdbc', 'mssql', + 'mysql', 'oracle', 'postgres', + 'presto', 'sqlite', 'vertica'} + if conn.conn_type not in allowed_conn_type: + raise AirflowException("The connection type is not supported by SqlSensor. 
" + + "Supported connection types: {}".format(list(allowed_conn_type))) + hook = conn.get_hook() - self.log.info('Poking: %s', self.sql) - records = hook.get_records(self.sql) + self.log.info('Poking: %s (with parameters %s)', self.sql, self.parameters) + records = hook.get_records(self.sql, self.parameters) if not records: return False return str(records[0][0]) not in ('0', '') diff --git a/tests/sensors/test_sql_sensor.py b/tests/sensors/test_sql_sensor.py index 03ea115356ee5..4629a7d01eba7 100644 --- a/tests/sensors/test_sql_sensor.py +++ b/tests/sensors/test_sql_sensor.py @@ -21,6 +21,7 @@ from airflow import DAG from airflow import configuration +from airflow.exceptions import AirflowException from airflow.sensors.sql_sensor import SqlSensor from airflow.utils.timezone import datetime @@ -40,27 +41,56 @@ def setUp(self): } self.dag = DAG(TEST_DAG_ID, default_args=args) + def test_unsupported_conn_type(self): + t = SqlSensor( + task_id='sql_sensor_check', + conn_id='redis_default', + sql="SELECT count(1) FROM INFORMATION_SCHEMA.TABLES", + dag=self.dag + ) + + with self.assertRaises(AirflowException): + t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) + @unittest.skipUnless( 'mysql' in configuration.conf.get('core', 'sql_alchemy_conn'), "this is a mysql test") def test_sql_sensor_mysql(self): - t = SqlSensor( + t1 = SqlSensor( task_id='sql_sensor_check', conn_id='mysql_default', sql="SELECT count(1) FROM INFORMATION_SCHEMA.TABLES", dag=self.dag ) - t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) + t1.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) + + t2 = SqlSensor( + task_id='sql_sensor_check', + conn_id='mysql_default', + sql="SELECT count(%s) FROM INFORMATION_SCHEMA.TABLES", + parameters=["table_name"], + dag=self.dag + ) + t2.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) @unittest.skipUnless( 'postgresql' in configuration.conf.get('core', 
'sql_alchemy_conn'), "this is a postgres test") def test_sql_sensor_postgres(self): - t = SqlSensor( + t1 = SqlSensor( task_id='sql_sensor_check', conn_id='postgres_default', sql="SELECT count(1) FROM INFORMATION_SCHEMA.TABLES", dag=self.dag ) - t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) + t1.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) + + t2 = SqlSensor( + task_id='sql_sensor_check', + conn_id='postgres_default', + sql="SELECT count(%s) FROM INFORMATION_SCHEMA.TABLES", + parameters=["table_name"], + dag=self.dag + ) + t2.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) @mock.patch('airflow.sensors.sql_sensor.BaseHook') def test_sql_sensor_postgres_poke(self, mock_hook): @@ -70,6 +100,7 @@ def test_sql_sensor_postgres_poke(self, mock_hook): sql="SELECT 1", ) + mock_hook.get_connection('postgres_default').conn_type = "postgres" mock_get_records = mock_hook.get_connection.return_value.get_hook.return_value.get_records mock_get_records.return_value = [] From f4277cb32a3b75591ed6decb9f8d6c33f60986be Mon Sep 17 00:00:00 2001 From: raman Date: Thu, 21 Feb 2019 15:37:12 +0530 Subject: [PATCH 0120/1104] [AIRFLOW-3865] Add API endpoint to get python code of dag by id (#4687) --- airflow/api/common/experimental/get_code.py | 42 ++++++++++++++++++++ airflow/www/api/experimental/endpoints.py | 14 +++++++ tests/www/api/experimental/test_endpoints.py | 14 +++++++ 3 files changed, 70 insertions(+) create mode 100644 airflow/api/common/experimental/get_code.py diff --git a/airflow/api/common/experimental/get_code.py b/airflow/api/common/experimental/get_code.py new file mode 100644 index 0000000000000..f082cb03837da --- /dev/null +++ b/airflow/api/common/experimental/get_code.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from airflow.exceptions import AirflowException, DagNotFound +from airflow import models, settings +from airflow.www import utils as wwwutils + + +def get_code(dag_id): + """Return python code of a given dag_id.""" + session = settings.Session() + DM = models.DagModel + dag = session.query(DM).filter(DM.dag_id == dag_id).first() + session.close() + # Check DAG exists. 
+ if dag is None: + error_message = "Dag id {} not found".format(dag_id) + raise DagNotFound(error_message) + + try: + with wwwutils.open_maybe_zipped(dag.fileloc, 'r') as f: + code = f.read() + return code + except IOError as e: + error_message = "Error {} while reading Dag id {} Code".format(str(e), dag_id) + raise AirflowException(error_message) diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py index facc8aa2a138f..13bc0ea929a3f 100644 --- a/airflow/www/api/experimental/endpoints.py +++ b/airflow/www/api/experimental/endpoints.py @@ -23,6 +23,7 @@ from airflow.api.common.experimental.get_dag_runs import get_dag_runs from airflow.api.common.experimental.get_task import get_task from airflow.api.common.experimental.get_task_instance import get_task_instance +from airflow.api.common.experimental.get_code import get_code from airflow.api.common.experimental.get_dag_run_state import get_dag_run_state from airflow.exceptions import AirflowException from airflow.utils.log.logging_mixin import LoggingMixin @@ -136,6 +137,19 @@ def test(): return jsonify(status='OK') +@api_experimental.route('/dags//code', methods=['GET']) +@requires_authentication +def get_dag_code(dag_id): + """Return python code of a given dag_id.""" + try: + return get_code(dag_id) + except AirflowException as err: + _log.info(err) + response = jsonify(error="{}".format(err)) + response.status_code = err.status_code + return response + + @api_experimental.route('/dags//tasks/', methods=['GET']) @requires_authentication def task_info(dag_id, task_id): diff --git a/tests/www/api/experimental/test_endpoints.py b/tests/www/api/experimental/test_endpoints.py index 97de5ffb4b4eb..90335347f1ff8 100644 --- a/tests/www/api/experimental/test_endpoints.py +++ b/tests/www/api/experimental/test_endpoints.py @@ -89,6 +89,20 @@ def test_task_info(self): self.assertIn('error', response.data.decode('utf-8')) self.assertEqual(404, response.status_code) + def 
test_get_dag_code(self): + url_template = '/api/experimental/dags/{}/code' + + response = self.client.get( + url_template.format('example_bash_operator') + ) + self.assertIn('BashOperator(', response.data.decode('utf-8')) + self.assertEqual(200, response.status_code) + + response = self.client.get( + url_template.format('xyz') + ) + self.assertEqual(404, response.status_code) + def test_task_paused(self): url_template = '/api/experimental/dags/{}/paused/{}' From 5c170f05944236cca961dd14e82fd22c327bf286 Mon Sep 17 00:00:00 2001 From: Ryan Yuan Date: Thu, 21 Feb 2019 21:50:05 +1100 Subject: [PATCH 0121/1104] [AIRFLOW-3933] Fix various typos (#4747) Fix typos --- CONTRIBUTING.md | 2 +- UPDATING.md | 4 ++-- airflow/contrib/example_dags/example_qubole_operator.py | 4 ++-- airflow/contrib/hooks/azure_data_lake_hook.py | 2 +- airflow/contrib/hooks/fs_hook.py | 2 +- airflow/contrib/hooks/qubole_hook.py | 2 +- airflow/contrib/hooks/salesforce_hook.py | 2 +- airflow/contrib/operators/awsbatch_operator.py | 2 +- airflow/contrib/operators/bigquery_check_operator.py | 2 +- airflow/contrib/operators/cassandra_to_gcs.py | 4 ++-- airflow/contrib/operators/dataflow_operator.py | 2 +- airflow/contrib/operators/dataproc_operator.py | 2 +- airflow/contrib/operators/druid_operator.py | 2 +- airflow/operators/druid_check_operator.py | 2 +- airflow/operators/presto_check_operator.py | 2 +- airflow/www/app.py | 2 +- dev/airflow-pr | 2 +- docs/howto/operator.rst | 2 +- docs/installation.rst | 2 +- docs/project.rst | 2 +- docs/security.rst | 2 +- 21 files changed, 24 insertions(+), 24 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bc96bdc6f5239..22666faf715da 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -264,7 +264,7 @@ meets these guidelines: 1. The pull request should include tests, either as doctests, unit tests, or both. 
The airflow repo uses [Travis CI](https://travis-ci.org/apache/airflow) to run the tests and [codecov](https://codecov.io/gh/apache/airflow) to track coverage. You can set up both for free on your fork (see the "Testing on Travis CI" section below). It will help you making sure you do not break the build with your PR and that you help increase coverage. 1. Please [rebase your fork](http://stackoverflow.com/a/7244456/1110993), squash commits, and resolve all conflicts. 1. Every pull request should have an associated [JIRA](https://issues.apache.org/jira/browse/AIRFLOW/?selectedTab=com.atlassian.jira.jira-projects-plugin:summary-panel). The JIRA link should also be contained in the PR description. -1. Preface your commit's subject & PR's title with **[AIRFLOW-XXX]** where *XXX* is the JIRA number. We compose release notes (i.e. for Airflow releases) from all commit titles in a release. By placing the JIRA number in the commit title and hence in the release notes, Airflow users can look into JIRA and Github PRs for more details about a particular change. +1. Preface your commit's subject & PR's title with **[AIRFLOW-XXX]** where *XXX* is the JIRA number. We compose release notes (i.e. for Airflow releases) from all commit titles in a release. By placing the JIRA number in the commit title and hence in the release notes, Airflow users can look into JIRA and GitHub PRs for more details about a particular change. 1. Add an [Apache License](http://www.apache.org/legal/src-headers.html) header to all new files 1. If the pull request adds functionality, the docs should be updated as part of the same PR. Doc string are often sufficient. Make sure to follow the Sphinx compatible standards. 1. The pull request should work for Python 2.7 and 3.5. If you need help writing code that works in both Python 2 and 3, see the documentation at the [Python-Future project](http://python-future.org) (the future package is an Airflow requirement and should be used where possible). 
diff --git a/UPDATING.md b/UPDATING.md index ac3e38a35eb75..512b66bbd967e 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -38,7 +38,7 @@ Sensors are now accessible via `airflow.sensors` and no longer via `airflow.oper For example: `from airflow.operators.sensors import BaseSensorOperator` becomes `from airflow.sensors.base_sensor_operator import BaseSensorOperator` -### Renamed "extra" requirments for cloud providers +### Renamed "extra" requirements for cloud providers Subpackages for specific services have been combined into one variant for each cloud provider. @@ -191,7 +191,7 @@ that he has permissions on. If a new role wants to access all the dags, the admi We also provide a new cli command(``sync_perm``) to allow admin to auto sync permissions. ### Modification to `ts_nodash` macro -`ts_nodash` previously contained TimeZone information alongwith execution date. For Example: `20150101T000000+0000`. This is not user-friendly for file or folder names which was a popular use case for `ts_nodash`. Hence this behavior has been changed and using `ts_nodash` will no longer contain TimeZone information, restoring the pre-1.10 behavior of this macro. And a new macro `ts_nodash_with_tz` has been added which can be used to get a string with execution date and timezone info without dashes. +`ts_nodash` previously contained TimeZone information along with execution date. For Example: `20150101T000000+0000`. This is not user-friendly for file or folder names which was a popular use case for `ts_nodash`. Hence this behavior has been changed and using `ts_nodash` will no longer contain TimeZone information, restoring the pre-1.10 behavior of this macro. And a new macro `ts_nodash_with_tz` has been added which can be used to get a string with execution date and timezone info without dashes. 
Examples: * `ts_nodash`: `20150101T000000` diff --git a/airflow/contrib/example_dags/example_qubole_operator.py b/airflow/contrib/example_dags/example_qubole_operator.py index 826a50af99cd9..5f77d09ba1442 100644 --- a/airflow/contrib/example_dags/example_qubole_operator.py +++ b/airflow/contrib/example_dags/example_qubole_operator.py @@ -65,7 +65,7 @@ def compare_result(ds, **kwargs): fetch_logs=True, # If `fetch_logs`=true, will fetch qubole command logs and concatenate # them into corresponding airflow task logs - tags='aiflow_example_run', + tags='airflow_example_run', # To attach tags to qubole command, auto attach 3 tags - dag_id, task_id, run_id qubole_conn_id='qubole_default', # Connection id to submit commands inside QDS, if not set "qubole_default" is used @@ -220,7 +220,7 @@ def main(args: Array[String]) { program=prog, language='scala', arguments='--class SparkPi', - tags='aiflow_example_run', + tags='airflow_example_run', dag=dag) t11.set_upstream(branching) diff --git a/airflow/contrib/hooks/azure_data_lake_hook.py b/airflow/contrib/hooks/azure_data_lake_hook.py index 21787382209c6..9eb7af7f8a71e 100644 --- a/airflow/contrib/hooks/azure_data_lake_hook.py +++ b/airflow/contrib/hooks/azure_data_lake_hook.py @@ -77,7 +77,7 @@ def upload_file(self, local_path, remote_path, nthreads=64, overwrite=True, are not supported. :type local_path: str :param remote_path: Remote path to upload to; if multiple files, this is the - dircetory root to write within. + directory root to write within. :type remote_path: str :param nthreads: Number of threads to use. If None, uses the number of cores. 
:type nthreads: int diff --git a/airflow/contrib/hooks/fs_hook.py b/airflow/contrib/hooks/fs_hook.py index 6832f20c225c1..1aa528b6205dc 100644 --- a/airflow/contrib/hooks/fs_hook.py +++ b/airflow/contrib/hooks/fs_hook.py @@ -30,7 +30,7 @@ class FSHook(BaseHook): example: Conn Id: fs_test Conn Type: File (path) - Host, Shchema, Login, Password, Port: empty + Host, Schema, Login, Password, Port: empty Extra: {"path": "/tmp"} """ diff --git a/airflow/contrib/hooks/qubole_hook.py b/airflow/contrib/hooks/qubole_hook.py index 1c98f26afcd00..df11a50d5d8d3 100755 --- a/airflow/contrib/hooks/qubole_hook.py +++ b/airflow/contrib/hooks/qubole_hook.py @@ -194,7 +194,7 @@ def get_jobs_id(self, ti): """ Get jobs associated with a Qubole commands :param ti: Task Instance of the dag, used to determine the Quboles command id - :return: Job informations assoiciated with command + :return: Job information associated with command """ if self.cmd is None: cmd_id = ti.xcom_pull(key="qbol_cmd_id", task_ids=self.task_id) diff --git a/airflow/contrib/hooks/salesforce_hook.py b/airflow/contrib/hooks/salesforce_hook.py index ba5c7e8d9a4d8..a1756b6530b6a 100644 --- a/airflow/contrib/hooks/salesforce_hook.py +++ b/airflow/contrib/hooks/salesforce_hook.py @@ -276,7 +276,7 @@ def write_object_to_file( schema = self.describe_object(object_name) - # possible columns that can be convereted to timestamps + # possible columns that can be converted to timestamps # are the ones that are either date or datetime types # strings are too general and we risk unintentional conversion possible_timestamp_cols = [ diff --git a/airflow/contrib/operators/awsbatch_operator.py b/airflow/contrib/operators/awsbatch_operator.py index 3c778e6e685cc..baf54603ac157 100644 --- a/airflow/contrib/operators/awsbatch_operator.py +++ b/airflow/contrib/operators/awsbatch_operator.py @@ -33,7 +33,7 @@ class AWSBatchOperator(BaseOperator): """ Execute a job on AWS Batch Service - .. 
warning: the queue parameter was renamed to job_queue to segreggate the + .. warning: the queue parameter was renamed to job_queue to segregate the internal CeleryExecutor queue from the AWS Batch internal queue. :param job_name: the name for the job that will run on AWS Batch (templated) diff --git a/airflow/contrib/operators/bigquery_check_operator.py b/airflow/contrib/operators/bigquery_check_operator.py index 247a1ae7fba1b..afb600a3d9120 100644 --- a/airflow/contrib/operators/bigquery_check_operator.py +++ b/airflow/contrib/operators/bigquery_check_operator.py @@ -48,7 +48,7 @@ class BigQueryCheckOperator(CheckOperator): This operator can be used as a data quality check in your pipeline, and depending on where you put it in your DAG, you have the choice to stop the critical path, preventing from - publishing dubious data, or on the side and receive email alterts + publishing dubious data, or on the side and receive email alerts without stopping the progress of the DAG. :param sql: the sql to be executed diff --git a/airflow/contrib/operators/cassandra_to_gcs.py b/airflow/contrib/operators/cassandra_to_gcs.py index 95107a497fd61..6819eca404ebb 100644 --- a/airflow/contrib/operators/cassandra_to_gcs.py +++ b/airflow/contrib/operators/cassandra_to_gcs.py @@ -266,7 +266,7 @@ def convert_tuple_type(cls, name, value): """ Converts a tuple to RECORD that contains n fields, each will be converted to its corresponding data type in bq and will be named 'field_', where - index is determined by the order of the tuple elments defined in cassandra. + index is determined by the order of the tuple elements defined in cassandra. 
""" names = ['field_' + str(i) for i in range(len(value))] values = [cls.convert_value(name, value) for name, value in zip(names, value)] @@ -276,7 +276,7 @@ def convert_tuple_type(cls, name, value): def convert_map_type(cls, name, value): """ Converts a map to a repeated RECORD that contains two fields: 'key' and 'value', - each will be converted to its corresopnding data type in BQ. + each will be converted to its corresponding data type in BQ. """ converted_map = [] for k, v in zip(value.keys(), value.values()): diff --git a/airflow/contrib/operators/dataflow_operator.py b/airflow/contrib/operators/dataflow_operator.py index 0f7ead15d6293..e880642f6067c 100644 --- a/airflow/contrib/operators/dataflow_operator.py +++ b/airflow/contrib/operators/dataflow_operator.py @@ -92,7 +92,7 @@ class DataFlowJavaOperator(BaseOperator): Cloud Platform for the dataflow job status while the job is in the JOB_STATE_RUNNING state. :type poll_sleep: int - :param job_class: The name of the dataflow job class to be executued, it + :param job_class: The name of the dataflow job class to be executed, it is often not the main class configured in the dataflow jar file. 
:type job_class: str diff --git a/airflow/contrib/operators/dataproc_operator.py b/airflow/contrib/operators/dataproc_operator.py index 8ff26969e32b5..e64cd25ef534f 100644 --- a/airflow/contrib/operators/dataproc_operator.py +++ b/airflow/contrib/operators/dataproc_operator.py @@ -1376,7 +1376,7 @@ def execute(self, context): self.hook.wait(self.start()) def start(self, context): - raise AirflowException('plese start a workflow operation') + raise AirflowException('Please start a workflow operation') class DataprocWorkflowTemplateInstantiateOperator(DataprocWorkflowTemplateBaseOperator): diff --git a/airflow/contrib/operators/druid_operator.py b/airflow/contrib/operators/druid_operator.py index 1436d99f28d6f..75d552fec5a5b 100644 --- a/airflow/contrib/operators/druid_operator.py +++ b/airflow/contrib/operators/druid_operator.py @@ -60,5 +60,5 @@ def execute(self, context): druid_ingest_conn_id=self.conn_id, max_ingestion_time=self.max_ingestion_time ) - self.log.info("Sumitting %s", self.index_spec_str) + self.log.info("Submitting %s", self.index_spec_str) hook.submit_indexing_job(self.index_spec_str) diff --git a/airflow/operators/druid_check_operator.py b/airflow/operators/druid_check_operator.py index 39674fdd3983e..514f61fc88988 100644 --- a/airflow/operators/druid_check_operator.py +++ b/airflow/operators/druid_check_operator.py @@ -47,7 +47,7 @@ class DruidCheckOperator(CheckOperator): This operator can be used as a data quality check in your pipeline, and depending on where you put it in your DAG, you have the choice to stop the critical path, preventing from - publishing dubious data, or on the side and receive email alterts + publishing dubious data, or on the side and receive email alerts without stopping the progress of the DAG. 
:param sql: the sql to be executed diff --git a/airflow/operators/presto_check_operator.py b/airflow/operators/presto_check_operator.py index 16f5bc0212a15..d70dcaa7d25af 100644 --- a/airflow/operators/presto_check_operator.py +++ b/airflow/operators/presto_check_operator.py @@ -48,7 +48,7 @@ class PrestoCheckOperator(CheckOperator): This operator can be used as a data quality check in your pipeline, and depending on where you put it in your DAG, you have the choice to stop the critical path, preventing from - publishing dubious data, or on the side and receive email alterts + publishing dubious data, or on the side and receive email alerts without stopping the progress of the DAG. :param sql: the sql to be executed diff --git a/airflow/www/app.py b/airflow/www/app.py index fde99743c58fd..ca82175bc6aab 100644 --- a/airflow/www/app.py +++ b/airflow/www/app.py @@ -135,7 +135,7 @@ def init_views(appbuilder): href='https://airflow.apache.org/', category="Docs", category_icon="fa-cube") - appbuilder.add_link("Github", + appbuilder.add_link("GitHub", href='https://github.com/apache/airflow', category="Docs") appbuilder.add_link('Version', diff --git a/dev/airflow-pr b/dev/airflow-pr index 42e01cc9e8ee8..641f0119e2023 100755 --- a/dev/airflow-pr +++ b/dev/airflow-pr @@ -63,7 +63,7 @@ AIRFLOW_GIT_LOCATION = os.environ.get( "AIRFLOW_GIT", os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) -# Remote name which points to the Gihub site +# Remote name which points to the GitHub site GITHUB_REMOTE_NAME = os.environ.get("GITHUB_REMOTE_NAME", "github") # OAuth key used for issuing requests against the GitHub API. If this is not # defined, then requests will be unauthenticated. You should only need to diff --git a/docs/howto/operator.rst b/docs/howto/operator.rst index 65a82310c3dc6..34752383a3774 100644 --- a/docs/howto/operator.rst +++ b/docs/howto/operator.rst @@ -534,7 +534,7 @@ it will be retrieved from the GCP connection used. 
Both variants are shown: Advanced """""""" -When creating a table, you can specify the optional ``initial_split_keys`` and ``column_familes``. +When creating a table, you can specify the optional ``initial_split_keys`` and ``column_families``. Please refer to the Python Client for Google Cloud Bigtable documentation `for Table `_ and `for Column Families `_. diff --git a/docs/installation.rst b/docs/installation.rst index 11523180314a8..5db7f15fae84e 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -75,7 +75,7 @@ Here's the list of the subpackages and what they enable: | gcp_api | ``pip install apache-airflow[gcp_api]`` | Google Cloud Platform hooks and operators | | | | (using ``google-api-python-client``) | +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ -| github_enterprise | ``pip install apache-airflow[github_enterprise]`` | Github Enterprise auth backend | +| github_enterprise | ``pip install apache-airflow[github_enterprise]`` | GitHub Enterprise auth backend | +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ | google_auth | ``pip install apache-airflow[google_auth]`` | Google auth backend | +---------------------+---------------------------------------------------+----------------------------------------------------------------------+ diff --git a/docs/project.rst b/docs/project.rst index 7d91077488c17..d480c79491f8c 100644 --- a/docs/project.rst +++ b/docs/project.rst @@ -23,7 +23,7 @@ History Airflow was started in October 2014 by Maxime Beauchemin at Airbnb. It was open source from the very first commit and officially brought under -the Airbnb Github and announced in June 2015. +the Airbnb GitHub and announced in June 2015. The project joined the Apache Software Foundation's incubation program in March 2016. 
diff --git a/docs/security.rst b/docs/security.rst index d23e84e43feef..5e1a2b2713382 100644 --- a/docs/security.rst +++ b/docs/security.rst @@ -502,7 +502,7 @@ on limited web views 'LogModelView', 'Docs', 'Documentation', - 'Github', + 'GitHub', 'About', 'Version', 'VersionView', From 5b76051ccd5e289091f0a89a65f772595c8ada3b Mon Sep 17 00:00:00 2001 From: Felix Date: Thu, 21 Feb 2019 12:52:05 +0100 Subject: [PATCH 0122/1104] [AIRFLOW-XXX] Add Slack Badge to Readme (#4750) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 8646af784d179..7ceb9b8a33b96 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,7 @@ under the License. [![License](http://img.shields.io/:license-Apache%202-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/apache-airflow.svg)](https://pypi.org/project/apache-airflow/) [![Twitter Follow](https://img.shields.io/twitter/follow/ApacheAirflow.svg?style=social&label=Follow)](https://twitter.com/ApacheAirflow) +[![Slack Status](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://apache-airflow-slack.herokuapp.com/) Apache Airflow (or simply Airflow) is a platform to programmatically author, schedule, and monitor workflows. From d46a137ec32eb81a6a2874f62a9623d0bee12c57 Mon Sep 17 00:00:00 2001 From: stefan-wolfsheimer Date: Thu, 21 Feb 2019 16:19:45 +0100 Subject: [PATCH 0123/1104] [AIRFLOW-3683] Fix formatting of error message for invalid TriggerRule (#4490) --- airflow/models/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 51f423e827b6a..90b184249174d 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -2113,7 +2113,7 @@ def __init__( raise AirflowException( "The trigger_rule must be one of {all_triggers}," "'{d}.{t}'; received '{tr}'." 
- .format(all_triggers=TriggerRule.all_triggers, + .format(all_triggers=TriggerRule.all_triggers(), d=dag.dag_id if dag else "", t=task_id, tr=trigger_rule)) self.trigger_rule = trigger_rule From 4745910903d87210e537761c3bbeb9c0cf7542bb Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Thu, 21 Feb 2019 11:49:38 -0800 Subject: [PATCH 0124/1104] [AIRFLOW-3153] Send DAG processing stats to statsd (#4748) Add 2 stats under the `airflow.dag_processing` namespace. The metric names follow the template: `dag_processing..`, where `` is the name of a file in the dag_folder (without the extension) and `` is one of the following: - `last_runtime`: the number of seconds it took to process the DAG file on the most recent iteration - `last_run.seconds_ago`: the number of seconds that have elapsed since the DAG file was last processed I've verified the logging by running the scheduler on the examples DAGs and logging the value of the gauges with netcat: $ nc -u -l -p 8125 | tr '|' '\n' | grep dag_processing gairflow.dag_processing.last_runtime.example_docker_operator:2.002253 gairflow.dag_processing.last_run.seconds_ago.example_docker_operator:18.066831 gairflow.dag_processing.last_runtime.tutorial:2.001403 gairflow.dag_processing.last_run.seconds_ago.tutorial:36.114995 gairflow.dag_processing.last_runtime.docker_copy_data:2.003188 gairflow.dag_processing.last_run.seconds_ago.docker_copy_data:28.097275 --- airflow/utils/dag_processing.py | 16 +++++++++++++++- docs/metrics.rst | 16 +++++++++------- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index 1473c016c7288..fa794fefc2c69 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -47,7 +47,7 @@ from airflow.dag.base_dag import BaseDag, BaseDagBag from airflow.exceptions import AirflowException from airflow.models import errors -from airflow.settings import logging_class_path +from airflow.settings import 
logging_class_path, Stats from airflow.utils import timezone from airflow.utils.db import provide_session from airflow.utils.log.logging_mixin import LoggingMixin @@ -966,11 +966,25 @@ def _log_file_processing_stats(self, known_file_paths): rows = [] for file_path in known_file_paths: last_runtime = self.get_last_runtime(file_path) + file_name = os.path.basename(file_path) + file_name = os.path.splitext(file_name)[0].replace(os.sep, '.') + if last_runtime: + Stats.gauge( + 'dag_processing.last_runtime.{}'.format(file_name), + last_runtime + ) + processor_pid = self.get_pid(file_path) processor_start_time = self.get_start_time(file_path) runtime = ((timezone.utcnow() - processor_start_time).total_seconds() if processor_start_time else None) last_run = self.get_last_finish_time(file_path) + if last_run: + seconds_ago = (timezone.utcnow() - last_run).total_seconds() + Stats.gauge( + 'dag_processing.last_run.seconds_ago.{}'.format(file_name), + seconds_ago + ) rows.append((file_path, processor_pid, diff --git a/docs/metrics.rst b/docs/metrics.rst index 29819c03e6ebe..0bd70fce32a61 100644 --- a/docs/metrics.rst +++ b/docs/metrics.rst @@ -49,13 +49,15 @@ scheduler_heartbeat Scheduler heartbeats Gauges ------ -===================== ===================================== -Name Description -===================== ===================================== -collect_dags Seconds taken to scan and import DAGs -dagbag_import_errors DAG import errors -dagbag_size DAG bag size -===================== ===================================== +=============================================== ======================================================================== +Name Description +=============================================== ======================================================================== +collect_dags Seconds taken to scan and import DAGs +dagbag_import_errors DAG import errors +dagbag_size DAG bag size +dag_processing.last_runtime. 
Seconds spent processing (in most recent iteration) +dag_processing.last_run.seconds_ago. Seconds since was last processed +=============================================== ======================================================================== Timers ------ From 240138f416ce7376071f5f20ac99c2c60e8f9697 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Bregu=C5=82a?= Date: Fri, 22 Feb 2019 12:25:13 +0100 Subject: [PATCH 0125/1104] [AIRFLOW-3925] Don't pull docker-images on pretest (#4740) Co-authored-by: Joshua Carp --- .travis.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 291b5e229a0ba..59e87d7fb192d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -44,13 +44,16 @@ jobs: include: - name: Flake8 stage: pre-test - script: pip install flake8 && flake8 + install: pip install flake8 + script: flake8 - name: Check license header stage: pre-test + install: skip script: scripts/ci/6-check-license.sh - name: Check docs stage: pre-test - script: pip install -e .[doc] && docs/build.sh + install: pip install -e .[doc] + script: docs/build.sh cache: directories: From ce499bb27709d5b9d623a83da1925b779c4b53e1 Mon Sep 17 00:00:00 2001 From: Szymon Przedwojski Date: Fri, 22 Feb 2019 15:56:53 +0100 Subject: [PATCH 0126/1104] [AIRFLOW-3701] Add Google Cloud Vision Product Search operators (#4665) --- .../example_dags/example_gcp_vision.py | 245 +++++++ airflow/contrib/hooks/gcp_vision_hook.py | 356 +++++++++ .../contrib/operators/gcp_vision_operator.py | 673 ++++++++++++++++++ docs/code.rst | 9 + docs/howto/operator.rst | 592 +++++++++++++++ docs/integration.rst | 25 + setup.py | 1 + tests/contrib/hooks/test_gcp_vision_hook.py | 548 ++++++++++++++ .../operators/test_gcp_vision_operator.py | 234 ++++++ .../test_gcp_vision_operator_system.py | 34 + tests/contrib/utils/gcp_authenticator.py | 1 + 11 files changed, 2718 insertions(+) create mode 100644 airflow/contrib/example_dags/example_gcp_vision.py create mode 100644 
airflow/contrib/hooks/gcp_vision_hook.py create mode 100644 airflow/contrib/operators/gcp_vision_operator.py create mode 100644 tests/contrib/hooks/test_gcp_vision_hook.py create mode 100644 tests/contrib/operators/test_gcp_vision_operator.py create mode 100644 tests/contrib/operators/test_gcp_vision_operator_system.py diff --git a/airflow/contrib/example_dags/example_gcp_vision.py b/airflow/contrib/example_dags/example_gcp_vision.py new file mode 100644 index 0000000000000..9ca96012ebb2d --- /dev/null +++ b/airflow/contrib/example_dags/example_gcp_vision.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Example Airflow DAG that creates, gets, updates and deletes Products and Product Sets in the Google Cloud +Vision service in the Google Cloud Platform. + +This DAG relies on the following OS environment variables + +* GCP_VISION_LOCATION - Google Cloud Platform zone where the instance exists. 
+""" +import os + +# [START howto_operator_vision_retry_import] +from google.api_core.retry import Retry +# [END howto_operator_vision_retry_import] +# [START howto_operator_vision_productset_import] +from google.cloud.vision_v1.proto.product_search_service_pb2 import ProductSet +# [END howto_operator_vision_productset_import] +# [START howto_operator_vision_product_import] +from google.cloud.vision_v1.proto.product_search_service_pb2 import Product +# [END howto_operator_vision_product_import] + +import airflow +from airflow import models + +from airflow.contrib.operators.gcp_vision_operator import ( + CloudVisionProductSetCreateOperator, + CloudVisionProductSetGetOperator, + CloudVisionProductSetUpdateOperator, + CloudVisionProductSetDeleteOperator, + CloudVisionProductCreateOperator, + CloudVisionProductGetOperator, + CloudVisionProductUpdateOperator, + CloudVisionProductDeleteOperator, +) + +default_args = {'start_date': airflow.utils.dates.days_ago(1)} + +# [START howto_operator_vision_args_common] +GCP_VISION_LOCATION = os.environ.get('GCP_VISION_LOCATION', 'europe-west1') +# [END howto_operator_vision_args_common] + +# [START howto_operator_vision_productset] +product_set = ProductSet(display_name='My Product Set 1') +# [END howto_operator_vision_productset] + +# [START howto_operator_vision_product] +product = Product(display_name='My Product 1', product_category='toys') +# [END howto_operator_vision_product] + +# [START howto_operator_vision_productset_explicit_id] +GCP_VISION_PRODUCT_SET_ID = os.environ.get('GCP_VISION_PRODUCT_SET_ID', 'product_set_explicit_id') +# [END howto_operator_vision_productset_explicit_id] + +# [START howto_operator_vision_product_explicit_id] +GCP_VISION_PRODUCT_ID = os.environ.get('GCP_VISION_PRODUCT_ID', 'product_explicit_id') +# [END howto_operator_vision_product_explicit_id] + +with models.DAG( + 'example_gcp_vision', default_args=default_args, schedule_interval=None # Override to match your needs +) as dag: + # 
################################## # + # ### Autogenerated IDs examples ### # + # ################################## # + + # [START howto_operator_vision_product_set_create] + product_set_create = CloudVisionProductSetCreateOperator( + location=GCP_VISION_LOCATION, + product_set=product_set, + retry=Retry(maximum=10.0), + timeout=5, + task_id='product_set_create', + ) + # [END howto_operator_vision_product_set_create] + + # [START howto_operator_vision_product_set_get] + product_set_get = CloudVisionProductSetGetOperator( + location=GCP_VISION_LOCATION, + product_set_id="{{ task_instance.xcom_pull('product_set_create') }}", + task_id='product_set_get', + ) + # [END howto_operator_vision_product_set_get] + + # [START howto_operator_vision_product_set_update] + product_set_update = CloudVisionProductSetUpdateOperator( + location=GCP_VISION_LOCATION, + product_set_id="{{ task_instance.xcom_pull('product_set_create') }}", + product_set=ProductSet(display_name='My Product Set 2'), + task_id='product_set_update', + ) + # [END howto_operator_vision_product_set_update] + + # [START howto_operator_vision_product_set_delete] + product_set_delete = CloudVisionProductSetDeleteOperator( + location=GCP_VISION_LOCATION, + product_set_id="{{ task_instance.xcom_pull('product_set_create') }}", + task_id='product_set_delete', + ) + # [END howto_operator_vision_product_set_delete] + + # [START howto_operator_vision_product_create] + product_create = CloudVisionProductCreateOperator( + location=GCP_VISION_LOCATION, + product=product, + retry=Retry(maximum=10.0), + timeout=5, + task_id='product_create', + ) + # [END howto_operator_vision_product_create] + + # [START howto_operator_vision_product_get] + product_get = CloudVisionProductGetOperator( + location=GCP_VISION_LOCATION, + product_id="{{ task_instance.xcom_pull('product_create') }}", + task_id='product_get', + ) + # [END howto_operator_vision_product_get] + + # [START howto_operator_vision_product_update] + product_update = 
CloudVisionProductUpdateOperator( + location=GCP_VISION_LOCATION, + product_id="{{ task_instance.xcom_pull('product_create') }}", + product=Product(display_name='My Product 2', description='My updated description'), + task_id='product_update', + ) + # [END howto_operator_vision_product_update] + + # [START howto_operator_vision_product_delete] + product_delete = CloudVisionProductDeleteOperator( + location=GCP_VISION_LOCATION, + product_id="{{ task_instance.xcom_pull('product_create') }}", + task_id='product_delete', + ) + # [END howto_operator_vision_product_delete] + + product_set_create >> product_set_get >> product_set_update >> product_set_delete + product_create >> product_get >> product_update >> product_delete + + # ############################# # + # ### Explicit IDs examples ### # + # ############################# # + + # [START howto_operator_vision_product_set_create_2] + product_set_create_2 = CloudVisionProductSetCreateOperator( + product_set_id=GCP_VISION_PRODUCT_SET_ID, + location=GCP_VISION_LOCATION, + product_set=product_set, + retry=Retry(maximum=10.0), + timeout=5, + task_id='product_set_create_2', + ) + # [END howto_operator_vision_product_set_create_2] + + # Second 'create' task with the same product_set_id to demonstrate idempotence + product_set_create_2_idempotence = CloudVisionProductSetCreateOperator( + product_set_id=GCP_VISION_PRODUCT_SET_ID, + location=GCP_VISION_LOCATION, + product_set=product_set, + retry=Retry(maximum=10.0), + timeout=5, + task_id='product_set_create_2_idempotence', + ) + + # [START howto_operator_vision_product_set_get_2] + product_set_get_2 = CloudVisionProductSetGetOperator( + location=GCP_VISION_LOCATION, product_set_id=GCP_VISION_PRODUCT_SET_ID, task_id='product_set_get_2' + ) + # [END howto_operator_vision_product_set_get_2] + + # [START howto_operator_vision_product_set_update_2] + product_set_update_2 = CloudVisionProductSetUpdateOperator( + location=GCP_VISION_LOCATION, + 
product_set_id=GCP_VISION_PRODUCT_SET_ID, + product_set=ProductSet(display_name='My Product Set 2'), + task_id='product_set_update_2', + ) + # [END howto_operator_vision_product_set_update_2] + + # [START howto_operator_vision_product_set_delete_2] + product_set_delete_2 = CloudVisionProductSetDeleteOperator( + location=GCP_VISION_LOCATION, product_set_id=GCP_VISION_PRODUCT_SET_ID, task_id='product_set_delete_2' + ) + # [END howto_operator_vision_product_set_delete_2] + + # [START howto_operator_vision_product_create_2] + product_create_2 = CloudVisionProductCreateOperator( + product_id=GCP_VISION_PRODUCT_ID, + location=GCP_VISION_LOCATION, + product=product, + retry=Retry(maximum=10.0), + timeout=5, + task_id='product_create_2', + ) + # [END howto_operator_vision_product_create_2] + + # Second 'create' task with the same product_id to demonstrate idempotence + product_create_2_idempotence = CloudVisionProductCreateOperator( + product_id=GCP_VISION_PRODUCT_ID, + location=GCP_VISION_LOCATION, + product=product, + retry=Retry(maximum=10.0), + timeout=5, + task_id='product_create_2_idempotence', + ) + + # [START howto_operator_vision_product_get_2] + product_get_2 = CloudVisionProductGetOperator( + location=GCP_VISION_LOCATION, product_id=GCP_VISION_PRODUCT_ID, task_id='product_get_2' + ) + # [END howto_operator_vision_product_get_2] + + # [START howto_operator_vision_product_update_2] + product_update_2 = CloudVisionProductUpdateOperator( + location=GCP_VISION_LOCATION, + product_id=GCP_VISION_PRODUCT_ID, + product=Product(display_name='My Product 2', description='My updated description'), + task_id='product_update_2', + ) + # [END howto_operator_vision_product_update_2] + + # [START howto_operator_vision_product_delete_2] + product_delete_2 = CloudVisionProductDeleteOperator( + location=GCP_VISION_LOCATION, product_id=GCP_VISION_PRODUCT_ID, task_id='product_delete_2' + ) + # [END howto_operator_vision_product_delete_2] + + product_set_create_2 >> product_set_get_2 
>> product_set_update_2 >> product_set_delete_2 + product_create_2 >> product_get_2 >> product_update_2 >> product_delete_2 diff --git a/airflow/contrib/hooks/gcp_vision_hook.py b/airflow/contrib/hooks/gcp_vision_hook.py new file mode 100644 index 0000000000000..099c21972526d --- /dev/null +++ b/airflow/contrib/hooks/gcp_vision_hook.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from copy import deepcopy + +from google.api_core.exceptions import AlreadyExists, GoogleAPICallError, RetryError +from google.cloud.vision_v1 import ProductSearchClient +from google.protobuf.json_format import MessageToDict + +from airflow import AirflowException +from airflow.contrib.hooks.gcp_api_base_hook import GoogleCloudBaseHook + + +class NameDeterminer: + """ + Class used for checking if the entity has the 'name' attribute set. + + - If so, no action is taken. + - If not, and the name can be constructed from other parameters provided, it is created and filled in + the entity. + - If both the entity's 'name' attribute is set and the name can be constructed from other parameters + provided: + - If they are the same: no action is taken. + - If they are different: an exception is thrown. 
+ """ + + def __init__(self, label, id_label, get_path): + self.label = label + self.id_label = id_label + self.get_path = get_path + + def get_entity_with_name(self, entity, entity_id, location, project_id): + entity = deepcopy(entity) + explicit_name = getattr(entity, 'name') + if location and entity_id: + # Necessary parameters to construct the name are present. Checking for conflict with explicit name + constructed_name = self.get_path(project_id, location, entity_id) + if not explicit_name: + entity.name = constructed_name + return entity + elif explicit_name != constructed_name: + self._raise_ex_different_names(constructed_name, explicit_name) + else: + # Not enough parameters to construct the name. Trying to use the name from Product / ProductSet. + if explicit_name: + return entity + else: + self._raise_ex_unable_to_determine_name() + + def _raise_ex_unable_to_determine_name(self): + raise AirflowException( + "Unable to determine the {label} name. Please either set the name directly in the {label} " + "object or provide the `location` and `{id_label}` parameters.".format( + label=self.label, id_label=self.id_label + ) + ) + + def _raise_ex_different_names(self, constructed_name, explicit_name): + raise AirflowException( + "The {label} name provided in the object ({explicit_name}) is different than the name created " + "from the input parameters ({constructed_name}). Please either: 1) Remove the {label} name, 2) " + "Remove the location and {id_label} parameters, 3) Unify the {label} name and input " + "parameters.".format( + label=self.label, + explicit_name=explicit_name, + constructed_name=constructed_name, + id_label=self.id_label, + ) + ) + + +class CloudVisionHook(GoogleCloudBaseHook): + """ + Hook for Google Cloud Vision APIs. 
+ """ + + _client = None + product_name_determiner = NameDeterminer('Product', 'product_id', ProductSearchClient.product_path) + product_set_name_determiner = NameDeterminer( + 'ProductSet', 'productset_id', ProductSearchClient.product_set_path + ) + + def __init__(self, gcp_conn_id='google_cloud_default', delegate_to=None): + super(CloudVisionHook, self).__init__(gcp_conn_id, delegate_to) + + def get_conn(self): + """ + Retrieves connection to Cloud Vision. + + :return: Google Cloud Vision client object. + :rtype: google.cloud.vision_v1.ProductSearchClient + """ + if not self._client: + self._client = ProductSearchClient(credentials=self._get_credentials()) + return self._client + + @GoogleCloudBaseHook.fallback_to_default_project_id + def create_product_set( + self, + location, + product_set, + project_id=None, + product_set_id=None, + retry=None, + timeout=None, + metadata=None, + ): + """ + For the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator` + """ + client = self.get_conn() + parent = ProductSearchClient.location_path(project_id, location) + self.log.info('Creating a new ProductSet under the parent: %s', parent) + response = self._handle_request( + lambda **kwargs: client.create_product_set(**kwargs), + parent=parent, + product_set=product_set, + product_set_id=product_set_id, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('ProductSet created: %s', response.name if response else '') + self.log.debug('ProductSet created:\n%s', response) + + if not product_set_id: + # Product set id was generated by the API + product_set_id = self._get_autogenerated_id(response) + self.log.info('Extracted autogenerated ProductSet ID from the response: %s', product_set_id) + + return product_set_id + + @GoogleCloudBaseHook.fallback_to_default_project_id + def get_product_set( + self, location, product_set_id, project_id=None, retry=None, timeout=None, metadata=None + ): + """ + For 
the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator` + """ + client = self.get_conn() + name = ProductSearchClient.product_set_path(project_id, location, product_set_id) + self.log.info('Retrieving ProductSet: %s', name) + response = self._handle_request( + lambda **kwargs: client.get_product_set(**kwargs), + name=name, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('ProductSet retrieved.') + self.log.debug('ProductSet retrieved:\n%s', response) + return MessageToDict(response) + + @GoogleCloudBaseHook.fallback_to_default_project_id + def update_product_set( + self, + product_set, + location=None, + product_set_id=None, + update_mask=None, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ): + """ + For the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator` + """ + client = self.get_conn() + product_set = self.product_set_name_determiner.get_entity_with_name( + product_set, product_set_id, location, project_id + ) + self.log.info('Updating ProductSet: %s', product_set.name) + response = self._handle_request( + lambda **kwargs: client.update_product_set(**kwargs), + product_set=product_set, + update_mask=update_mask, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('ProductSet updated: %s', response.name if response else '') + self.log.debug('ProductSet updated:\n%s', response) + return MessageToDict(response) + + @GoogleCloudBaseHook.fallback_to_default_project_id + def delete_product_set( + self, location, product_set_id, project_id=None, retry=None, timeout=None, metadata=None + ): + """ + For the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator` + """ + client = self.get_conn() + name = ProductSearchClient.product_set_path(project_id, location, product_set_id) + self.log.info('Deleting ProductSet: 
%s', name) + response = self._handle_request( + lambda **kwargs: client.delete_product_set(**kwargs), + name=name, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('ProductSet with the name [%s] deleted.', name) + return response + + @GoogleCloudBaseHook.fallback_to_default_project_id + def create_product( + self, location, product, project_id=None, product_id=None, retry=None, timeout=None, metadata=None + ): + """ + For the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator` + """ + client = self.get_conn() + parent = ProductSearchClient.location_path(project_id, location) + self.log.info('Creating a new Product under the parent: %s', parent) + response = self._handle_request( + lambda **kwargs: client.create_product(**kwargs), + parent=parent, + product=product, + product_id=product_id, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('Product created: %s', response.name if response else '') + self.log.debug('Product created:\n%s', response) + + if not product_id: + # Product id was generated by the API + product_id = self._get_autogenerated_id(response) + self.log.info('Extracted autogenerated Product ID from the response: %s', product_id) + + return product_id + + @GoogleCloudBaseHook.fallback_to_default_project_id + def get_product(self, location, product_id, project_id=None, retry=None, timeout=None, metadata=None): + """ + For the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator` + """ + client = self.get_conn() + name = ProductSearchClient.product_path(project_id, location, product_id) + self.log.info('Retrieving Product: %s', name) + response = self._handle_request( + lambda **kwargs: client.get_product(**kwargs), + name=name, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('Product retrieved.') + self.log.debug('Product retrieved:\n%s', response) + return 
MessageToDict(response) + + @GoogleCloudBaseHook.fallback_to_default_project_id + def update_product( + self, + product, + location=None, + product_id=None, + update_mask=None, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ): + """ + For the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator` + """ + client = self.get_conn() + product = self.product_name_determiner.get_entity_with_name(product, product_id, location, project_id) + self.log.info('Updating ProductSet: %s', product.name) + response = self._handle_request( + lambda **kwargs: client.update_product(**kwargs), + product=product, + update_mask=update_mask, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('Product updated: %s', response.name if response else '') + self.log.debug('Product updated:\n%s', response) + return MessageToDict(response) + + @GoogleCloudBaseHook.fallback_to_default_project_id + def delete_product(self, location, product_id, project_id=None, retry=None, timeout=None, metadata=None): + """ + For the documentation see: + :py:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator` + """ + client = self.get_conn() + name = ProductSearchClient.product_path(project_id, location, product_id) + self.log.info('Deleting ProductSet: %s', name) + response = self._handle_request( + lambda **kwargs: client.delete_product(**kwargs), + name=name, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + self.log.info('Product with the name [%s] deleted:', name) + return response + + def _handle_request(self, fun, **kwargs): + try: + return fun(**kwargs) + except GoogleAPICallError as e: + if isinstance(e, AlreadyExists): + raise e + else: + self.log.error('The request failed:\n%s', str(e)) + raise AirflowException(e) + except RetryError as e: + self.log.error('The request failed due to a retryable error and retry attempts failed.') + raise AirflowException(e) 
+ except ValueError as e: + self.log.error('The request failed, the parameters are invalid.') + raise AirflowException(e) + + @staticmethod + def _get_entity_name(is_product, project_id, location, entity_id): + if is_product: + return ProductSearchClient.product_path(project_id, location, entity_id) + else: + return ProductSearchClient.product_set_path(project_id, location, entity_id) + + @staticmethod + def _get_autogenerated_id(response): + try: + name = response.name + except AttributeError as e: + raise AirflowException('Unable to get name from response... [{}]\n{}'.format(response, e)) + if '/' not in name: + raise AirflowException('Unable to get id from name... [{}]'.format(name)) + return name.rsplit('/', 1)[1] diff --git a/airflow/contrib/operators/gcp_vision_operator.py b/airflow/contrib/operators/gcp_vision_operator.py new file mode 100644 index 0000000000000..96ed11dc2bbdd --- /dev/null +++ b/airflow/contrib/operators/gcp_vision_operator.py @@ -0,0 +1,673 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from google.api_core.exceptions import AlreadyExists + +from airflow.contrib.hooks.gcp_vision_hook import CloudVisionHook +from airflow.models import BaseOperator +from airflow.utils.decorators import apply_defaults + + +class CloudVisionProductSetCreateOperator(BaseOperator): + """ + Creates a new ProductSet resource. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductSetCreateOperator` + + :param product_set: (Required) The ProductSet to create. If a dict is provided, it must be of the same + form as the protobuf message `ProductSet`. + :type product_set: dict or google.cloud.vision_v1.types.ProductSet + :param location: (Required) The region where the ProductSet should be created. Valid regions + (as of 2019-02-05) are: us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param project_id: (Optional) The project in which the ProductSet should be created. If set to None or + missing, the default project_id from the GCP connection is used. + :type project_id: str + :param product_set_id: (Optional) A user-supplied resource id for this ProductSet. + If set, the server will attempt to use this value as the resource id. If it is + already in use, an error is returned with code ALREADY_EXISTS. Must be at most + 128 characters long. It cannot contain the character /. + :type product_set_id: str + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. + :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. 
+ :type gcp_conn_id: str + """ + + # [START vision_productset_create_template_fields] + template_fields = ('location', 'project_id', 'product_set_id', 'gcp_conn_id') + # [END vision_productset_create_template_fields] + + @apply_defaults + def __init__( + self, + product_set, + location, + project_id=None, + product_set_id=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductSetCreateOperator, self).__init__(*args, **kwargs) + self.location = location + self.project_id = project_id + self.product_set = product_set + self.product_set_id = product_set_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + try: + return self._hook.create_product_set( + location=self.location, + project_id=self.project_id, + product_set=self.product_set, + product_set_id=self.product_set_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + except AlreadyExists: + self.log.info( + 'Product set with id %s already exists. Exiting from the create operation.', + self.product_set_id, + ) + return self.product_set_id + + +class CloudVisionProductSetGetOperator(BaseOperator): + """ + Gets information associated with a ProductSet. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductSetGetOperator` + + :param location: (Required) The region where the ProductSet is located. Valid regions (as of 2019-02-05) + are: us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param product_set_id: (Required) The resource id of this ProductSet. + :type product_set_id: str + :param project_id: (Optional) The project in which the ProductSet is located. If set + to None or missing, the default `project_id` from the GCP connection is used. 
+ :type project_id: str + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. + :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. + :type gcp_conn_id: str + """ + + # [START vision_productset_get_template_fields] + template_fields = ('location', 'project_id', 'product_set_id', 'gcp_conn_id') + # [END vision_productset_get_template_fields] + + @apply_defaults + def __init__( + self, + location, + product_set_id, + project_id=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductSetGetOperator, self).__init__(*args, **kwargs) + self.location = location + self.project_id = project_id + self.product_set_id = product_set_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + return self._hook.get_product_set( + location=self.location, + product_set_id=self.product_set_id, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + + +class CloudVisionProductSetUpdateOperator(BaseOperator): + """ + Makes changes to a `ProductSet` resource. Only display_name can be updated currently. + + .. note:: To locate the `ProductSet` resource, its `name` in the form + `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` is necessary. + + You can provide the `name` directly as an attribute of the `product_set` object. 
+ However, you can leave it blank and provide `location` and `product_set_id` instead + (and optionally `project_id` - if not present, the connection default will be used) + and the `name` will be created by the operator itself. + + This mechanism exists for your convenience, to allow leaving the `project_id` empty + and having Airflow use the connection default `project_id`. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductSetUpdateOperator` + + :param product_set: (Required) The ProductSet resource which replaces the one on the + server. If a dict is provided, it must be of the same form as the protobuf + message `ProductSet`. + :type product_set: dict or google.cloud.vision_v1.types.ProductSet + :param location: (Optional) The region where the ProductSet is located. Valid regions (as of 2019-02-05) + are: us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param product_set_id: (Optional) The resource id of this ProductSet. + :type product_set_id: str + :param project_id: (Optional) The project in which the ProductSet should be created. If set to None or + missing, the default project_id from the GCP connection is used. + :type project_id: str + :param update_mask: (Optional) The `FieldMask` that specifies which fields to update. If update_mask + isn't specified, all mutable fields are to be updated. Valid mask path is display_name. If a dict is + provided, it must be of the same form as the protobuf message `FieldMask`. + :type update_mask: dict or google.cloud.vision_v1.types.FieldMask + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. 
+ :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. + :type gcp_conn_id: str + + """ + + # [START vision_productset_update_template_fields] + template_fields = ('location', 'project_id', 'product_set_id', 'gcp_conn_id') + # [END vision_productset_update_template_fields] + + @apply_defaults + def __init__( + self, + product_set, + location=None, + product_set_id=None, + project_id=None, + update_mask=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductSetUpdateOperator, self).__init__(*args, **kwargs) + self.product_set = product_set + self.update_mask = update_mask + self.location = location + self.project_id = project_id + self.product_set_id = product_set_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + return self._hook.update_product_set( + location=self.location, + product_set_id=self.product_set_id, + project_id=self.project_id, + product_set=self.product_set, + update_mask=self.update_mask, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + + +class CloudVisionProductSetDeleteOperator(BaseOperator): + """ + Permanently deletes a `ProductSet`. `Products` and `ReferenceImages` in the + `ProductSet` are not deleted. The actual image files are not deleted from Google + Cloud Storage. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductSetDeleteOperator` + + :param location: (Required) The region where the ProductSet is located. 
Valid regions (as of 2019-02-05) + are: us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param product_set_id: (Required) The resource id of this ProductSet. + :type product_set_id: str + :param project_id: (Optional) The project in which the ProductSet should be created. + If set to None or missing, the default project_id from the GCP connection is used. + :type project_id: str + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. + :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. + :type gcp_conn_id: str + + """ + + # [START vision_productset_delete_template_fields] + template_fields = ('location', 'project_id', 'product_set_id', 'gcp_conn_id') + # [END vision_productset_delete_template_fields] + + @apply_defaults + def __init__( + self, + location, + product_set_id, + project_id=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductSetDeleteOperator, self).__init__(*args, **kwargs) + self.location = location + self.project_id = project_id + self.product_set_id = product_set_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + return self._hook.delete_product_set( + location=self.location, + product_set_id=self.product_set_id, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + + +class 
CloudVisionProductCreateOperator(BaseOperator): + """ + Creates and returns a new product resource. + + Possible errors regarding the `Product` object provided: + + - Returns INVALID_ARGUMENT if `display_name` is missing or longer than 4096 characters. + - Returns INVALID_ARGUMENT if `description` is longer than 4096 characters. + - Returns INVALID_ARGUMENT if `product_category` is missing or invalid. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductCreateOperator` + + :param location: (Required) The region where the Product should be created. Valid regions + (as of 2019-02-05) are: us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param product: (Required) The product to create. If a dict is provided, it must be of the same form as + the protobuf message `Product`. + :type product: dict or google.cloud.vision_v1.types.Product + :param project_id: (Optional) The project in which the Product should be created. If set to None or + missing, the default project_id from the GCP connection is used. + :type project_id: str + :param product_id: (Optional) A user-supplied resource id for this Product. + If set, the server will attempt to use this value as the resource id. If it is + already in use, an error is returned with code ALREADY_EXISTS. Must be at most + 128 characters long. It cannot contain the character /. + :type product_id: str + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. + :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. 
+ :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. + :type gcp_conn_id: str + + """ + + # [START vision_product_create_template_fields] + template_fields = ('location', 'project_id', 'product_id', 'gcp_conn_id') + # [END vision_product_create_template_fields] + + @apply_defaults + def __init__( + self, + location, + product, + project_id=None, + product_id=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductCreateOperator, self).__init__(*args, **kwargs) + self.location = location + self.product = product + self.project_id = project_id + self.product_id = product_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + try: + return self._hook.create_product( + location=self.location, + product=self.product, + project_id=self.project_id, + product_id=self.product_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + except AlreadyExists: + self.log.info( + 'Product with id %s already exists. Exiting from the create operation.', self.product_id + ) + return self.product_id + + +class CloudVisionProductGetOperator(BaseOperator): + """ + Gets information associated with a `Product`. + + Possible errors: + + - Returns NOT_FOUND if the `Product` does not exist. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductGetOperator` + + :param location: (Required) The region where the Product is located. Valid regions (as of 2019-02-05) are: + us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param product_id: (Required) The resource id of this Product. 
+ :type product_id: str + :param project_id: (Optional) The project in which the Product is located. If set to + None or missing, the default project_id from the GCP connection is used. + :type project_id: str + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. + :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. + :type gcp_conn_id: str + + """ + + # [START vision_product_get_template_fields] + template_fields = ('location', 'project_id', 'product_id', 'gcp_conn_id') + # [END vision_product_get_template_fields] + + @apply_defaults + def __init__( + self, + location, + product_id, + project_id=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductGetOperator, self).__init__(*args, **kwargs) + self.location = location + self.product_id = product_id + self.project_id = project_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + return self._hook.get_product( + location=self.location, + product_id=self.product_id, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + + +class CloudVisionProductUpdateOperator(BaseOperator): + """ + Makes changes to a Product resource. Only the display_name, description, and labels fields can be + updated right now. 
+ + If labels are updated, the change will not be reflected in queries until the next index time. + + .. note:: To locate the `Product` resource, its `name` in the form + `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` is necessary. + + You can provide the `name` directly as an attribute of the `product` object. However, you can leave it + blank and provide `location` and `product_id` instead (and optionally `project_id` - if not present, + the connection default will be used) and the `name` will be created by the operator itself. + + This mechanism exists for your convenience, to allow leaving the `project_id` empty and having Airflow + use the connection default `project_id`. + + Possible errors related to the provided `Product`: + + - Returns NOT_FOUND if the Product does not exist. + - Returns INVALID_ARGUMENT if display_name is present in update_mask but is missing from the request or + longer than 4096 characters. + - Returns INVALID_ARGUMENT if description is present in update_mask but is longer than 4096 characters. + - Returns INVALID_ARGUMENT if product_category is present in update_mask. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductUpdateOperator` + + :param product: (Required) The Product resource which replaces the one on the server. product.name is + immutable. If a dict is provided, it must be of the same form as the protobuf message `Product`. + :type product: dict or google.cloud.vision_v1.types.ProductSet + :param location: (Optional) The region where the Product is located. Valid regions (as of 2019-02-05) are: + us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param product_id: (Optional) The resource id of this Product. + :type product_id: str + :param project_id: (Optional) The project in which the Product is located. If set to None or + missing, the default project_id from the GCP connection is used. 
+ :type project_id: str + :param update_mask: (Optional) The `FieldMask` that specifies which fields to update. If update_mask + isn't specified, all mutable fields are to be updated. Valid mask paths include product_labels, + display_name, and description. If a dict is provided, it must be of the same form as the protobuf + message `FieldMask`. + :type update_mask: dict or google.cloud.vision_v1.types.FieldMask + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. + :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. + :type gcp_conn_id: str + """ + + # [START vision_product_update_template_fields] + template_fields = ('location', 'project_id', 'product_id', 'gcp_conn_id') + # [END vision_product_update_template_fields] + + @apply_defaults + def __init__( + self, + product, + location=None, + product_id=None, + project_id=None, + update_mask=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductUpdateOperator, self).__init__(*args, **kwargs) + self.product = product + self.location = location + self.product_id = product_id + self.project_id = project_id + self.update_mask = update_mask + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + return self._hook.update_product( + product=self.product, + location=self.location, + product_id=self.product_id, + 
project_id=self.project_id, + update_mask=self.update_mask, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + + +class CloudVisionProductDeleteOperator(BaseOperator): + """ + Permanently deletes a product and its reference images. + + Metadata of the product and all its images will be deleted right away, but search queries against + ProductSets containing the product may still work until all related caches are refreshed. + + Possible errors: + + - Returns NOT_FOUND if the product does not exist. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:CloudVisionProductDeleteOperator` + + :param location: (Required) The region where the Product is located. Valid regions (as of 2019-02-05) are: + us-east1, us-west1, europe-west1, asia-east1 + :type location: str + :param product_id: (Required) The resource id of this Product. + :type product_id: str + :param project_id: (Optional) The project in which the Product is located. If set to None or + missing, the default project_id from the GCP connection is used. + :type project_id: str + :param retry: (Optional) A retry object used to retry requests. If `None` is + specified, requests will not be retried. + :type retry: google.api_core.retry.Retry + :param timeout: (Optional) The amount of time, in seconds, to wait for the request to + complete. Note that if retry is specified, the timeout applies to each individual + attempt. + :type timeout: float + :param metadata: (Optional) Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform. 
+ :type gcp_conn_id: str + """ + + # [START vision_product_delete_template_fields] + template_fields = ('location', 'project_id', 'product_id', 'gcp_conn_id') + # [END vision_product_delete_template_fields] + + @apply_defaults + def __init__( + self, + location, + product_id, + project_id=None, + retry=None, + timeout=None, + metadata=None, + gcp_conn_id='google_cloud_default', + *args, + **kwargs + ): + super(CloudVisionProductDeleteOperator, self).__init__(*args, **kwargs) + self.location = location + self.product_id = product_id + self.project_id = project_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self._hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id) + + def execute(self, context): + return self._hook.delete_product( + location=self.location, + product_id=self.product_id, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) diff --git a/docs/code.rst b/docs/code.rst index 5ccc2f438fab1..b14fb09648809 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -208,6 +208,14 @@ Operators .. autoclass:: airflow.contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator .. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator .. autoclass:: airflow.contrib.operators.gcp_function_operator.GcfFunctionDeployOperator +.. autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator +.. autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator +.. autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator +.. autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator +.. 
autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator +.. autoclass:: airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator .. autoclass:: airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator .. autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator @@ -481,6 +489,7 @@ Community contributed hooks .. autoclass:: airflow.contrib.hooks.gcp_spanner_hook.CloudSpannerHook .. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook .. autoclass:: airflow.contrib.hooks.gcp_sql_hook.CloudSqlHook +.. autoclass:: airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook .. autoclass:: airflow.contrib.hooks.cloudant_hook.CloudantHook .. autoclass:: airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook .. autoclass:: airflow.contrib.hooks.gcp_dataproc_hook.DataProcHook diff --git a/docs/howto/operator.rst b/docs/howto/operator.rst index 34752383a3774..01926a0ae35cd 100644 --- a/docs/howto/operator.rst +++ b/docs/howto/operator.rst @@ -1735,3 +1735,595 @@ More information See `Google Cloud Storage ObjectAccessControls insert documentation `_. + +Google Cloud Vision Operators +------------------------------ + +.. _howto/operator:CloudVisionProductSetCreateOperator: + +CloudVisionProductSetCreateOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Creates a new :code:`ProductSet` resource. + +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. 
literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset_explicit_id] + :end-before: [END howto_operator_vision_productset_explicit_id] + +Using the operator +"""""""""""""""""" + +We are using the ``ProductSet`` and ``Retry`` objects from Google libraries: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset_import] + :end-before: [END howto_operator_vision_productset_import] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_retry_import] + :end-before: [END howto_operator_vision_retry_import] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset] + :end-before: [END howto_operator_vision_productset] + +The ``product_set_id`` argument can be omitted (it will be generated by the API): + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_create] + :end-before: [END howto_operator_vision_product_set_create] + +Or it can be specified explicitly: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_create_2] + :end-before: [END howto_operator_vision_product_set_create_2] + + +Templating +"""""""""" + +.. literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_productset_create_template_fields] + :end-before: [END vision_productset_create_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision ProductSet create documentation +`_. + +.. 
_howto/operator:CloudVisionProductSetGetOperator: + +CloudVisionProductSetGetOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Gets information associated with a :code:`ProductSet`. + +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset_explicit_id] + :end-before: [END howto_operator_vision_productset_explicit_id] + +Using the operator +"""""""""""""""""" + +If ``product_set_id`` was generated by the API it can be extracted from XCOM: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_get] + :end-before: [END howto_operator_vision_product_set_get] + +Otherwise it can be specified explicitly: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_get_2] + :end-before: [END howto_operator_vision_product_set_get_2] + +Templating +"""""""""" + +.. literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_productset_get_template_fields] + :end-before: [END vision_productset_get_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision ProductSet get documentation +`_. + +.. 
_howto/operator:CloudVisionProductSetUpdateOperator: + +CloudVisionProductSetUpdateOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Makes changes to a :code:`ProductSet` resource. Only :code:`display_name` can be updated +currently. + +.. note:: To locate the `ProductSet` resource, its `name` in the form + ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` is necessary. + +You can provide the `name` directly as an attribute of the `product_set` object. +However, you can leave it blank and provide `location` and `product_set_id` instead (and +optionally `project_id` - if not present, the connection default will be used) and the +`name` will be created by the operator itself. + +This mechanism exists for your convenience, to allow leaving the `project_id` empty and +having Airflow use the connection default `project_id`. + +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset_explicit_id] + :end-before: [END howto_operator_vision_productset_explicit_id] + +Using the operator +"""""""""""""""""" + +We are using the ``ProductSet`` object from the Google Cloud Vision library: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset_import] + :end-before: [END howto_operator_vision_productset_import] + +.. 
literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset] + :end-before: [END howto_operator_vision_productset] + +Initialization of the task: + +If ``product_set_id`` was generated by the API it can be extracted from XCOM: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_update] + :end-before: [END howto_operator_vision_product_set_update] + +Otherwise it can be specified explicitly: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_update_2] + :end-before: [END howto_operator_vision_product_set_update_2] + +Templating +"""""""""" + +.. literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_productset_update_template_fields] + :end-before: [END vision_productset_update_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision ProductSet update documentation +`_. + +.. _howto/operator:CloudVisionProductSetDeleteOperator: + +CloudVisionProductSetDeleteOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Permanently deletes a :code:`ProductSet`. :code:`Products` and :code:`ReferenceImages` in +the :code:`ProductSet` are not deleted. The actual image files are not deleted from +Google Cloud Storage. + +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. 
literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_productset_explicit_id] + :end-before: [END howto_operator_vision_productset_explicit_id] + +Using the operator +"""""""""""""""""" + +If ``product_set_id`` was generated by the API it can be extracted from XCOM: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_delete] + :end-before: [END howto_operator_vision_product_set_delete] + +Otherwise it can be specified explicitly: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_set_delete_2] + :end-before: [END howto_operator_vision_product_set_delete_2] + +Templating +"""""""""" + +.. literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_productset_delete_template_fields] + :end-before: [END vision_productset_delete_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision ProductSet delete documentation +`_. + +.. _howto/operator:CloudVisionProductCreateOperator: + +CloudVisionProductCreateOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Creates and returns a new product resource. + +Possible errors regarding the :code:`Product` object provided: + +- Returns INVALID_ARGUMENT if `display_name` is missing or longer than 4096 characters. +- Returns INVALID_ARGUMENT if `description` is longer than 4096 characters. +- Returns INVALID_ARGUMENT if `product_category` is missing or invalid. 
+ +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product_explicit_id] + :end-before: [END howto_operator_vision_product_explicit_id] + +Using the operator +"""""""""""""""""" + +We are using the ``Product`` and ``Retry`` objects from Google libraries: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product_import] + :end-before: [END howto_operator_vision_product_import] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_retry_import] + :end-before: [END howto_operator_vision_retry_import] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product] + :end-before: [END howto_operator_vision_product] + +The ``product_id`` argument can be omitted (it will be generated by the API): + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_create] + :end-before: [END howto_operator_vision_product_create] + +Or it can be specified explicitly: + +.. 
literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_create_2] + :end-before: [END howto_operator_vision_product_create_2] + + +Templating +"""""""""" + +.. literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_product_create_template_fields] + :end-before: [END vision_product_create_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision Product create documentation +`_. + +.. _howto/operator:CloudVisionProductGetOperator: + +CloudVisionProductGetOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Gets information associated with a :code:`Product`. + +Possible errors: + +- Returns NOT_FOUND if the `Product` does not exist. + +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product_explicit_id] + :end-before: [END howto_operator_vision_product_explicit_id] + +Using the operator +"""""""""""""""""" + +If ``product_id`` was generated by the API it can be extracted from XCOM: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_get] + :end-before: [END howto_operator_vision_product_get] + +Otherwise it can be specified explicitly: + +.. 
literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_get_2] + :end-before: [END howto_operator_vision_product_get_2] + +Templating +"""""""""" + +.. literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_product_get_template_fields] + :end-before: [END vision_product_get_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision Product get documentation +`_. + +.. _howto/operator:CloudVisionProductUpdateOperator: + +CloudVisionProductUpdateOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Makes changes to a :code:`Product` resource. Only the :code:`display_name`, +:code:`description`, and :code:`labels` fields can be updated right now. +If labels are updated, the change will not be reflected in queries until the next index +time. + +.. note:: To locate the `Product` resource, its `name` in the form + ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` is necessary. + +You can provide the `name` directly as an attribute of the `product` object. However, you +can leave it blank and provide `location` and `product_id` instead (and optionally +`project_id` - if not present, the connection default will be used) and the `name` will +be created by the operator itself. + +This mechanism exists for your convenience, to allow leaving the `project_id` empty and +having Airflow use the connection default `project_id`. + +Possible errors: + +- Returns NOT_FOUND if the `Product` does not exist. +- Returns INVALID_ARGUMENT if `display_name` is present in `update_mask` but is missing + from the request or longer than 4096 characters. +- Returns INVALID_ARGUMENT if `description` is present in `update_mask` but is longer than + 4096 characters. +- Returns INVALID_ARGUMENT if `product_category` is present in `update_mask`. 
+ +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product_explicit_id] + :end-before: [END howto_operator_vision_product_explicit_id] + +Using the operator +"""""""""""""""""" + +We are using the ``Product`` object from the Google Cloud Vision library: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product_import] + :end-before: [END howto_operator_vision_product_import] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product] + :end-before: [END howto_operator_vision_product] + +If ``product_id`` was generated by the API it can be extracted from XCOM: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_update] + :end-before: [END howto_operator_vision_product_update] + +Otherwise it can be specified explicitly: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_update_2] + :end-before: [END howto_operator_vision_product_update_2] + +Templating +"""""""""" + +.. 
literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_product_update_template_fields] + :end-before: [END vision_product_update_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision Product update documentation +`_. + +.. _howto/operator:CloudVisionProductDeleteOperator: + +CloudVisionProductDeleteOperator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Permanently deletes a product and its reference images. + +Metadata of the product and all its images will be deleted right away, but search queries +against :code:`ProductSets` containing the product may still work until all related +caches are refreshed. + +Possible errors: + +- Returns NOT_FOUND if the product does not exist. + +For parameter definition, take a look at +:class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator` + +Arguments +""""""""" + +Some arguments in the example DAG are taken from the OS environment variables: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_args_common] + :end-before: [END howto_operator_vision_args_common] + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :start-after: [START howto_operator_vision_product_explicit_id] + :end-before: [END howto_operator_vision_product_explicit_id] + +Using the operator +"""""""""""""""""" + +If ``product_id`` was generated by the API it can be extracted from XCOM: + +.. literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_delete] + :end-before: [END howto_operator_vision_product_delete] + +Otherwise it can be specified explicitly: + +.. 
literalinclude:: ../../airflow/contrib/example_dags/example_gcp_vision.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_vision_product_delete_2] + :end-before: [END howto_operator_vision_product_delete_2] + +Templating +"""""""""" + +.. literalinclude:: ../../airflow/contrib/operators/gcp_vision_operator.py + :language: python + :dedent: 4 + :start-after: [START vision_product_delete_template_fields] + :end-before: [END vision_product_delete_template_fields] + +More information +"""""""""""""""" + +See `Google Cloud Vision Product delete documentation +`_. diff --git a/docs/integration.rst b/docs/integration.rst index 27f29d6bb075f..aa13ebf91806f 100644 --- a/docs/integration.rst +++ b/docs/integration.rst @@ -593,6 +593,31 @@ Transfer Service They also use :class:`airflow.contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook` to communicate with Google Cloud Platform. +Cloud Vision +'''''''''''' + +Cloud Vision Product Search Operators +""""""""""""""""""""""""""""""""""""" + +:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator` + Creates a new ProductSet resource. +:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator` + Gets information associated with a ProductSet. +:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator` + Makes changes to a ProductSet resource. +:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator` + Permanently deletes a ProductSet. +:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator` + Creates a new Product resource. +:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator` + Gets information associated with a Product. +:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator` + Makes changes to a Product resource. 
+:class:`airflow.contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator` + Permanently deletes a product and its reference images. + +They also use :class:`airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook` to communicate with Google Cloud Platform. + Google Kubernetes Engine '''''''''''''''''''''''' diff --git a/setup.py b/setup.py index d090d0b9a9f5a..03219e312917e 100644 --- a/setup.py +++ b/setup.py @@ -179,6 +179,7 @@ def write_version(filename=os.path.join(*['airflow', 'google-cloud-container>=0.1.1', 'google-cloud-bigtable==0.31.0', 'google-cloud-spanner>=1.7.1', + 'google-cloud-vision>=0.35.2', 'grpcio-gcp>=0.2.2', 'PyOpenSSL', 'pandas-gbq' diff --git a/tests/contrib/hooks/test_gcp_vision_hook.py b/tests/contrib/hooks/test_gcp_vision_hook.py new file mode 100644 index 0000000000000..ddc7114d8a612 --- /dev/null +++ b/tests/contrib/hooks/test_gcp_vision_hook.py @@ -0,0 +1,548 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest + +from google.cloud.vision_v1 import ProductSearchClient +from google.cloud.vision_v1.proto.product_search_service_pb2 import ProductSet, Product +from google.protobuf.json_format import MessageToDict +from parameterized import parameterized + +from airflow.contrib.hooks.gcp_vision_hook import CloudVisionHook +from tests.contrib.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +try: + from unittest import mock +except ImportError: + try: + import mock + except ImportError: + mock = None + +from airflow import AirflowException + +PROJECT_ID_TEST = 'project-id' +PROJECT_ID_TEST_2 = 'project-id-2' +LOC_ID_TEST = 'loc-id' +LOC_ID_TEST_2 = 'loc-id-2' +PRODUCTSET_ID_TEST = 'ps-id' +PRODUCTSET_ID_TEST_2 = 'ps-id-2' +PRODUCT_ID_TEST = 'p-id' +PRODUCT_ID_TEST_2 = 'p-id-2' + + +class TestGcpVisionHook(unittest.TestCase): + def setUp(self): + with mock.patch( + 'airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.__init__', + new=mock_base_gcp_hook_default_project_id, + ): + self.vision_hook_default_project_id = CloudVisionHook(gcp_conn_id='test') + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_create_productset_explicit_id(self, get_conn): + # Given + create_product_set_method = get_conn.return_value.create_product_set + create_product_set_method.return_value = None + parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) + hook = self.vision_hook_default_project_id + product_set = ProductSet() + # When + result = hook.create_product_set( + location=LOC_ID_TEST, + product_set_id=PRODUCTSET_ID_TEST, + product_set=product_set, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + # Then + # ProductSet ID was provided explicitly in the method call above, should be returned from the method + self.assertEqual(result, PRODUCTSET_ID_TEST) + create_product_set_method.assert_called_once_with( + parent=parent, + product_set=product_set, + 
product_set_id=PRODUCTSET_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_create_productset_autogenerated_id(self, get_conn): + # Given + autogenerated_id = 'autogen-id' + response_product_set = ProductSet( + name=ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id) + ) + create_product_set_method = get_conn.return_value.create_product_set + create_product_set_method.return_value = response_product_set + parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) + hook = self.vision_hook_default_project_id + product_set = ProductSet() + # When + result = hook.create_product_set( + location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST + ) + # Then + # ProductSet ID was not provided in the method call above. Should be extracted from the API response + # and returned. + self.assertEqual(result, autogenerated_id) + create_product_set_method.assert_called_once_with( + parent=parent, + product_set=product_set, + product_set_id=None, + retry=None, + timeout=None, + metadata=None, + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_create_productset_autogenerated_id_wrong_api_response(self, get_conn): + # Given + response_product_set = None + create_product_set_method = get_conn.return_value.create_product_set + create_product_set_method.return_value = response_product_set + parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) + hook = self.vision_hook_default_project_id + product_set = ProductSet() + # When + with self.assertRaises(AirflowException) as cm: + hook.create_product_set( + location=LOC_ID_TEST, + product_set_id=None, + product_set=product_set, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + # Then + # API response was wrong (None) and thus ProductSet ID extraction should 
fail. + err = cm.exception + self.assertIn('Unable to get name from response...', str(err)) + create_product_set_method.assert_called_once_with( + parent=parent, + product_set=product_set, + product_set_id=None, + retry=None, + timeout=None, + metadata=None, + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_get_productset(self, get_conn): + # Given + name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST) + response_product_set = ProductSet(name=name) + get_product_set_method = get_conn.return_value.get_product_set + get_product_set_method.return_value = response_product_set + hook = self.vision_hook_default_project_id + # When + response = hook.get_product_set( + location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST + ) + # Then + self.assertTrue(response) + self.assertEqual(response, MessageToDict(response_product_set)) + get_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_productset_no_explicit_name(self, get_conn): + # Given + product_set = ProductSet() + update_product_set_method = get_conn.return_value.update_product_set + update_product_set_method.return_value = product_set + hook = self.vision_hook_default_project_id + productset_name = ProductSearchClient.product_set_path( + PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST + ) + # When + result = hook.update_product_set( + location=LOC_ID_TEST, + product_set_id=PRODUCTSET_ID_TEST, + product_set=product_set, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + # Then + self.assertEqual(result, MessageToDict(product_set)) + update_product_set_method.assert_called_once_with( + product_set=ProductSet(name=productset_name), + metadata=None, + retry=None, + timeout=None, + update_mask=None, + ) + + 
@parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)]) + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_productset_no_explicit_name_and_missing_params_for_constructed_name( + self, location, product_set_id, get_conn + ): + # Given + update_product_set_method = get_conn.return_value.update_product_set + update_product_set_method.return_value = None + hook = self.vision_hook_default_project_id + product_set = ProductSet() + # When + with self.assertRaises(AirflowException) as cm: + hook.update_product_set( + location=location, + product_set_id=product_set_id, + product_set=product_set, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + err = cm.exception + self.assertTrue(err) + self.assertIn( + "Unable to determine the ProductSet name. Please either set the name directly in the " + "ProductSet object or provide the `location` and `productset_id` parameters.", + str(err), + ) + update_product_set_method.assert_not_called() + + @parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)]) + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_productset_explicit_name_missing_params_for_constructed_name( + self, location, product_set_id, get_conn + ): + # Given + explicit_ps_name = ProductSearchClient.product_set_path( + PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2 + ) + product_set = ProductSet(name=explicit_ps_name) + update_product_set_method = get_conn.return_value.update_product_set + update_product_set_method.return_value = product_set + hook = self.vision_hook_default_project_id + # When + result = hook.update_product_set( + location=location, + product_set_id=product_set_id, + product_set=product_set, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + # Then + self.assertEqual(result, 
MessageToDict(product_set)) + update_product_set_method.assert_called_once_with( + product_set=ProductSet(name=explicit_ps_name), + metadata=None, + retry=None, + timeout=None, + update_mask=None, + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_productset_explicit_name_different_from_constructed(self, get_conn): + # Given + update_product_set_method = get_conn.return_value.update_product_set + update_product_set_method.return_value = None + hook = self.vision_hook_default_project_id + explicit_ps_name = ProductSearchClient.product_set_path( + PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2 + ) + product_set = ProductSet(name=explicit_ps_name) + template_ps_name = ProductSearchClient.product_set_path( + PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST + ) + # When + # Location and product_set_id are passed in addition to a ProductSet with an explicit name, + # but both names differ (constructed != explicit). + # Should throw AirflowException in this case. + with self.assertRaises(AirflowException) as cm: + hook.update_product_set( + location=LOC_ID_TEST, + product_set_id=PRODUCTSET_ID_TEST, + product_set=product_set, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + err = cm.exception + # self.assertIn("The required parameter 'project_id' is missing", str(err)) + self.assertTrue(err) + self.assertIn( + "The ProductSet name provided in the object ({}) is different than the name " + "created from the input parameters ({}). 
Please either: 1) Remove the ProductSet " + "name, 2) Remove the location and productset_id parameters, 3) Unify the " + "ProductSet name and input parameters.".format(explicit_ps_name, template_ps_name), + str(err), + ) + update_product_set_method.assert_not_called() + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_delete_productset(self, get_conn): + # Given + delete_product_set_method = get_conn.return_value.delete_product_set + delete_product_set_method.return_value = None + name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST) + hook = self.vision_hook_default_project_id + # When + response = hook.delete_product_set( + location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST + ) + # Then + self.assertIsNone(response) + delete_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_create_product_explicit_id(self, get_conn): + # Given + create_product_method = get_conn.return_value.create_product + create_product_method.return_value = None + parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) + hook = self.vision_hook_default_project_id + product = Product() + # When + result = hook.create_product( + location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, project_id=PROJECT_ID_TEST + ) + # Then + # Product ID was provided explicitly in the method call above, should be returned from the method + self.assertEqual(result, PRODUCT_ID_TEST) + create_product_method.assert_called_once_with( + parent=parent, + product=product, + product_id=PRODUCT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_create_product_autogenerated_id(self, get_conn): + # Given + autogenerated_id = 'autogen-p-id' + 
response_product = Product( + name=ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id) + ) + create_product_method = get_conn.return_value.create_product + create_product_method.return_value = response_product + parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) + hook = self.vision_hook_default_project_id + product = Product() + # When + result = hook.create_product( + location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST + ) + # Then + # Product ID was not provided in the method call above. Should be extracted from the API response + # and returned. + self.assertEqual(result, autogenerated_id) + create_product_method.assert_called_once_with( + parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_create_product_autogenerated_id_wrong_name_in_response(self, get_conn): + # Given + wrong_name = 'wrong_name_not_a_correct_path' + response_product = Product(name=wrong_name) + create_product_method = get_conn.return_value.create_product + create_product_method.return_value = response_product + parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) + hook = self.vision_hook_default_project_id + product = Product() + # When + with self.assertRaises(AirflowException) as cm: + hook.create_product( + location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST + ) + # Then + # API response was wrong (wrong name format) and thus ProductSet ID extraction should fail. 
+ err = cm.exception + self.assertIn('Unable to get id from name', str(err)) + create_product_method.assert_called_once_with( + parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_create_product_autogenerated_id_wrong_api_response(self, get_conn): + # Given + response_product = None + create_product_method = get_conn.return_value.create_product + create_product_method.return_value = response_product + parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) + hook = self.vision_hook_default_project_id + product = Product() + # When + with self.assertRaises(AirflowException) as cm: + hook.create_product( + location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST + ) + # Then + # API response was wrong (None) and thus ProductSet ID extraction should fail. + err = cm.exception + self.assertIn('Unable to get name from response...', str(err)) + create_product_method.assert_called_once_with( + parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_product_no_explicit_name(self, get_conn): + # Given + product = Product() + update_product_method = get_conn.return_value.update_product + update_product_method.return_value = product + hook = self.vision_hook_default_project_id + product_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) + # When + result = hook.update_product( + location=LOC_ID_TEST, + product_id=PRODUCT_ID_TEST, + product=product, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + # Then + self.assertEqual(result, MessageToDict(product)) + update_product_method.assert_called_once_with( + product=Product(name=product_name), metadata=None, retry=None, timeout=None, 
update_mask=None + ) + + @parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)]) + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_product_no_explicit_name_and_missing_params_for_constructed_name( + self, location, product_id, get_conn + ): + # Given + update_product_method = get_conn.return_value.update_product + update_product_method.return_value = None + hook = self.vision_hook_default_project_id + product = Product() + # When + with self.assertRaises(AirflowException) as cm: + hook.update_product( + location=location, + product_id=product_id, + product=product, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + err = cm.exception + self.assertTrue(err) + self.assertIn( + "Unable to determine the Product name. Please either set the name directly in the " + "Product object or provide the `location` and `product_id` parameters.", + str(err), + ) + update_product_method.assert_not_called() + + @parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)]) + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_product_explicit_name_missing_params_for_constructed_name( + self, location, product_id, get_conn + ): + # Given + explicit_p_name = ProductSearchClient.product_path( + PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2 + ) + product = Product(name=explicit_p_name) + update_product_method = get_conn.return_value.update_product + update_product_method.return_value = product + hook = self.vision_hook_default_project_id + # When + result = hook.update_product( + location=location, + product_id=product_id, + product=product, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + # Then + self.assertEqual(result, MessageToDict(product)) + update_product_method.assert_called_once_with( + product=Product(name=explicit_p_name), 
metadata=None, retry=None, timeout=None, update_mask=None + ) + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_update_product_explicit_name_different_from_constructed(self, get_conn): + # Given + update_product_method = get_conn.return_value.update_product + update_product_method.return_value = None + hook = self.vision_hook_default_project_id + explicit_p_name = ProductSearchClient.product_path( + PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2 + ) + product = Product(name=explicit_p_name) + template_p_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) + # When + # Location and product_id are passed in addition to a Product with an explicit name, + # but both names differ (constructed != explicit). + # Should throw AirflowException in this case. + with self.assertRaises(AirflowException) as cm: + hook.update_product( + location=LOC_ID_TEST, + product_id=PRODUCT_ID_TEST, + product=product, + update_mask=None, + project_id=PROJECT_ID_TEST, + retry=None, + timeout=None, + metadata=None, + ) + err = cm.exception + self.assertTrue(err) + self.assertIn( + "The Product name provided in the object ({}) is different than the name created from the input " + "parameters ({}). 
Please either: 1) Remove the Product name, 2) Remove the location and product_" + "id parameters, 3) Unify the Product name and input parameters.".format( + explicit_p_name, template_p_name + ), + str(err), + ) + update_product_method.assert_not_called() + + @mock.patch('airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.get_conn') + def test_delete_product(self, get_conn): + # Given + delete_product_method = get_conn.return_value.delete_product + delete_product_method.return_value = None + name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) + hook = self.vision_hook_default_project_id + # When + response = hook.delete_product( + location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, project_id=PROJECT_ID_TEST + ) + # Then + self.assertIsNone(response) + delete_product_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) diff --git a/tests/contrib/operators/test_gcp_vision_operator.py b/tests/contrib/operators/test_gcp_vision_operator.py new file mode 100644 index 0000000000000..a806f85021d69 --- /dev/null +++ b/tests/contrib/operators/test_gcp_vision_operator.py @@ -0,0 +1,234 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest + +from google.api_core.exceptions import AlreadyExists +from google.cloud.vision_v1.proto.product_search_service_pb2 import ProductSet, Product + +from airflow.contrib.operators.gcp_vision_operator import ( + CloudVisionProductSetCreateOperator, + CloudVisionProductSetGetOperator, + CloudVisionProductSetUpdateOperator, + CloudVisionProductSetDeleteOperator, + CloudVisionProductCreateOperator, + CloudVisionProductGetOperator, + CloudVisionProductUpdateOperator, + CloudVisionProductDeleteOperator, +) + +try: + # noinspection PyProtectedMember + from unittest import mock +except ImportError: + try: + import mock + except ImportError: + mock = None + +PRODUCTSET_TEST = ProductSet(display_name='Test Product Set') +PRODUCTSET_ID_TEST = 'my-productset' +PRODUCT_TEST = Product(display_name='My Product 1', product_category='toys') +PRODUCT_ID_TEST = 'my-product' +LOCATION_TEST = 'europe-west1' +GCP_CONN_ID = 'google_cloud_default' + + +class CloudVisionProductSetCreateTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.create_product_set.return_value = {} + op = CloudVisionProductSetCreateOperator( + location=LOCATION_TEST, product_set=PRODUCTSET_TEST, task_id='id' + ) + op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.create_product_set.assert_called_once_with( + location=LOCATION_TEST, + product_set=PRODUCTSET_TEST, + product_set_id=None, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ) + + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook.get_conn') + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook._handle_request') + def test_already_exists(self, _handle_request, get_conn): + get_conn.return_value = {} + _handle_request.side_effect = AlreadyExists(message='') + # Exception AlreadyExists not 
raised, caught in the operator's execute() - idempotence + op = CloudVisionProductSetCreateOperator( + location=LOCATION_TEST, + product_set=PRODUCTSET_TEST, + product_set_id=PRODUCTSET_ID_TEST, + project_id='mock-project-id', + task_id='id', + ) + result = op.execute(None) + self.assertEqual(PRODUCTSET_ID_TEST, result) + + +class CloudVisionProductSetUpdateTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.update_product_set.return_value = {} + op = CloudVisionProductSetUpdateOperator( + location=LOCATION_TEST, product_set=PRODUCTSET_TEST, task_id='id' + ) + op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.update_product_set.assert_called_once_with( + location=LOCATION_TEST, + product_set=PRODUCTSET_TEST, + product_set_id=None, + project_id=None, + retry=None, + timeout=None, + metadata=None, + update_mask=None, + ) + + +class CloudVisionProductSetGetTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.get_product_set.return_value = {} + op = CloudVisionProductSetGetOperator( + location=LOCATION_TEST, product_set_id=PRODUCTSET_ID_TEST, task_id='id' + ) + op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.get_product_set.assert_called_once_with( + location=LOCATION_TEST, + product_set_id=PRODUCTSET_ID_TEST, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ) + + +class CloudVisionProductSetDeleteTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.delete_product_set.return_value = {} + op = CloudVisionProductSetDeleteOperator( + location=LOCATION_TEST, 
product_set_id=PRODUCTSET_ID_TEST, task_id='id' + ) + op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.delete_product_set.assert_called_once_with( + location=LOCATION_TEST, + product_set_id=PRODUCTSET_ID_TEST, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ) + + +class CloudVisionProductCreateTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.create_product.return_value = {} + op = CloudVisionProductCreateOperator(location=LOCATION_TEST, product=PRODUCT_TEST, task_id='id') + op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.create_product.assert_called_once_with( + location=LOCATION_TEST, + product=PRODUCT_TEST, + product_id=None, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ) + + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook.get_conn') + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook._handle_request') + def test_already_exists(self, _handle_request, get_conn): + get_conn.return_value = {} + _handle_request.side_effect = AlreadyExists(message='') + # Exception AlreadyExists not raised, caught in the operator's execute() - idempotence + op = CloudVisionProductCreateOperator( + location=LOCATION_TEST, + product=PRODUCT_TEST, + product_id=PRODUCT_ID_TEST, + project_id='mock-project-id', + task_id='id', + ) + result = op.execute(None) + self.assertEqual(PRODUCT_ID_TEST, result) + + +class CloudVisionProductGetTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.get_product.return_value = {} + op = CloudVisionProductGetOperator(location=LOCATION_TEST, product_id=PRODUCT_ID_TEST, task_id='id') + 
op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.get_product.assert_called_once_with( + location=LOCATION_TEST, + product_id=PRODUCT_ID_TEST, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ) + + +class CloudVisionProductUpdateTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.update_product.return_value = {} + op = CloudVisionProductUpdateOperator(location=LOCATION_TEST, product=PRODUCT_TEST, task_id='id') + op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.update_product.assert_called_once_with( + location=LOCATION_TEST, + product=PRODUCT_TEST, + product_id=None, + project_id=None, + retry=None, + timeout=None, + metadata=None, + update_mask=None, + ) + + +class CloudVisionProductDeleteTest(unittest.TestCase): + @mock.patch('airflow.contrib.operators.gcp_vision_operator.CloudVisionHook') + def test_minimal_green_path(self, mock_hook): + mock_hook.return_value.delete_product.return_value = {} + op = CloudVisionProductDeleteOperator( + location=LOCATION_TEST, product_id=PRODUCT_ID_TEST, task_id='id' + ) + op.execute(context=None) + mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID) + mock_hook.return_value.delete_product.assert_called_once_with( + location=LOCATION_TEST, + product_id=PRODUCT_ID_TEST, + project_id=None, + retry=None, + timeout=None, + metadata=None, + ) diff --git a/tests/contrib/operators/test_gcp_vision_operator_system.py b/tests/contrib/operators/test_gcp_vision_operator_system.py new file mode 100644 index 0000000000000..2b75642d6f3a3 --- /dev/null +++ b/tests/contrib/operators/test_gcp_vision_operator_system.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import unittest + +from tests.contrib.utils.base_gcp_system_test_case import DagGcpSystemTestCase, SKIP_TEST_WARNING +from tests.contrib.utils.gcp_authenticator import GCP_AI_KEY + + +@unittest.skipIf(DagGcpSystemTestCase.skip_check(GCP_AI_KEY), SKIP_TEST_WARNING) +class CloudVisionExampleDagsSystemTest(DagGcpSystemTestCase): + def __init__(self, method_name='runTest'): + super(CloudVisionExampleDagsSystemTest, self).__init__( + method_name, dag_id='example_gcp_vision', gcp_key=GCP_AI_KEY + ) + + def test_run_example_dag_function(self): + self._run_dag() diff --git a/tests/contrib/utils/gcp_authenticator.py b/tests/contrib/utils/gcp_authenticator.py index b26f23f9c7a8d..e8a200118f570 100644 --- a/tests/contrib/utils/gcp_authenticator.py +++ b/tests/contrib/utils/gcp_authenticator.py @@ -31,6 +31,7 @@ GCP_BIGTABLE_KEY = 'gcp_bigtable.json' GCP_SPANNER_KEY = 'gcp_spanner.json' GCP_GCS_KEY = 'gcp_gcs.json' +GCP_AI_KEY = 'gcp_ai.json' KEYPATH_EXTRA = 'extra__google_cloud_platform__key_path' KEYFILE_DICT_EXTRA = 'extra__google_cloud_platform__keyfile_dict' From 6c64b1da69355050b41d32622ab83d935f5fbbcf Mon Sep 17 00:00:00 2001 From: zhongjiajie Date: Sat, 23 Feb 2019 00:12:21 +0800 Subject: [PATCH 0127/1104] [AIRFLOW-3741] Add extra config to Oracle hook (#4584) Add extra config to Oracle 
hook including encoding, mode, threaded etc --- airflow/hooks/oracle_hook.py | 78 +++++++++- docs/howto/manage-connections.rst | 69 +++++++++ tests/hooks/test_oracle_hook.py | 242 ++++++++++++++++++++++++++++++ 3 files changed, 384 insertions(+), 5 deletions(-) create mode 100644 tests/hooks/test_oracle_hook.py diff --git a/airflow/hooks/oracle_hook.py b/airflow/hooks/oracle_hook.py index ae6aa0f039cf6..202208084f32d 100644 --- a/airflow/hooks/oracle_hook.py +++ b/airflow/hooks/oracle_hook.py @@ -49,22 +49,66 @@ def get_conn(self): You can set these parameters in the extra fields of your connection as in ``{ "dsn":"some.host.address" , "service_name":"some.service.name" }`` + see more param detail in + `cx_Oracle.connect `_ """ conn = self.get_connection(self.oracle_conn_id) + conn_config = { + 'user': conn.login, + 'password': conn.password + } dsn = conn.extra_dejson.get('dsn', None) sid = conn.extra_dejson.get('sid', None) mod = conn.extra_dejson.get('module', None) service_name = conn.extra_dejson.get('service_name', None) + port = conn.port if conn.port else 1521 if dsn and sid and not service_name: - dsn = cx_Oracle.makedsn(dsn, conn.port, sid) - conn = cx_Oracle.connect(conn.login, conn.password, dsn=dsn) + conn_config['dsn'] = cx_Oracle.makedsn(dsn, port, sid) elif dsn and service_name and not sid: - dsn = cx_Oracle.makedsn(dsn, conn.port, service_name=service_name) - conn = cx_Oracle.connect(conn.login, conn.password, dsn=dsn) + conn_config['dsn'] = cx_Oracle.makedsn(dsn, port, service_name=service_name) else: - conn = cx_Oracle.connect(conn.login, conn.password, conn.host) + conn_config['dsn'] = conn.host + if 'encoding' in conn.extra_dejson: + conn_config['encoding'] = conn.extra_dejson.get('encoding') + # if `encoding` is specific but `nencoding` is not + # `nencoding` should use same values as `encoding` to set encoding, inspired by + # https://github.com/oracle/python-cx_Oracle/issues/157#issuecomment-371877993 + if 'nencoding' not in 
conn.extra_dejson: + conn_config['nencoding'] = conn.extra_dejson.get('encoding') + if 'nencoding' in conn.extra_dejson: + conn_config['nencoding'] = conn.extra_dejson.get('nencoding') + if 'threaded' in conn.extra_dejson: + conn_config['threaded'] = conn.extra_dejson.get('threaded') + if 'events' in conn.extra_dejson: + conn_config['events'] = conn.extra_dejson.get('events') + + mode = conn.extra_dejson.get('mode', '').lower() + if mode == 'sysdba': + conn_config['mode'] = cx_Oracle.SYSDBA + elif mode == 'sysasm': + conn_config['mode'] = cx_Oracle.SYSASM + elif mode == 'sysoper': + conn_config['mode'] = cx_Oracle.SYSOPER + elif mode == 'sysbkp': + conn_config['mode'] = cx_Oracle.SYSBKP + elif mode == 'sysdgd': + conn_config['mode'] = cx_Oracle.SYSDGD + elif mode == 'syskmt': + conn_config['mode'] = cx_Oracle.SYSKMT + elif mode == 'sysrac': + conn_config['mode'] = cx_Oracle.SYSRAC + + purity = conn.extra_dejson.get('purity', '').lower() + if purity == 'new': + conn_config['purity'] = cx_Oracle.ATTR_PURITY_NEW + elif purity == 'self': + conn_config['purity'] = cx_Oracle.ATTR_PURITY_SELF + elif purity == 'default': + conn_config['purity'] = cx_Oracle.ATTR_PURITY_DEFAULT + + conn = cx_Oracle.connect(**conn_config) if mod is not None: conn.module = mod @@ -80,6 +124,18 @@ def insert_rows(self, table, rows, target_fields=None, commit_every=1000): - Replace NaN values with NULL using `numpy.nan_to_num` (not using `is_nan()` because of input types error for strings) - Coerce datetime cells to Oracle DATETIME format during insert + + :param table: target Oracle table, use dot notation to target a + specific database + :type table: str + :param rows: the rows to insert into the table + :type rows: iterable of tuples + :param target_fields: the names of the columns to fill in the table + :type target_fields: iterable of str + :param commit_every: the maximum number of rows to insert in one transaction + Default 1000, Set greater than 0. 
+ Set 1 to insert each row in each single transaction + :type commit_every: int """ if target_fields: target_fields = ', '.join(target_fields) @@ -130,6 +186,18 @@ def bulk_insert_rows(self, table, rows, target_fields=None, commit_every=5000): A performant bulk insert for cx_Oracle that uses prepared statements via `executemany()`. For best performance, pass in `rows` as an iterator. + + :param table: target Oracle table, use dot notation to target a + specific database + :type table: str + :param rows: the rows to insert into the table + :type rows: iterable of tuples + :param target_fields: the names of the columns to fill in the table, default None. + If None, each rows should have some order as table columns name + :type target_fields: iterable of str Or None + :param commit_every: the maximum number of rows to insert in one transaction + Default 5000. Set greater than 0. Set 1 to insert each row in each transaction + :type commit_every: int """ conn = self.get_conn() cursor = conn.cursor() diff --git a/docs/howto/manage-connections.rst b/docs/howto/manage-connections.rst index 3f5433ac81a77..4c839d7fd07c7 100644 --- a/docs/howto/manage-connections.rst +++ b/docs/howto/manage-connections.rst @@ -374,6 +374,75 @@ Extra (optional) postgresql://postgres_user:XXXXXXXXXXXX@1.1.1.1:5432/postgresdb?sslmode=verify-ca&sslcert=%2Ftmp%2Fclient-cert.pem&sslkey=%2Ftmp%2Fclient-key.pem&sslrootcert=%2Ftmp%2Fserver-ca.pem +Oracle +~~~~~~~~ +The Oracle connection type provides connection to a Oracle database. + +Configuring the Connection +'''''''''''''''''''''''''' +Dsn (required) + The Data Source Name. The host address for the Oracle server. + +Sid (optional) + The Oracle System ID. The uniquely identify a particular database on a system. + +Service_name (optional) + The db_unique_name of the database. + +Port (optional) + The port for the Oracle server, Default 1521. + +Login (required) + Specify the user name to connect. 
+ +Password (required) + Specify the password to connect. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Oracle + connection. The following parameters are supported: + + * **encoding** - The encoding to use for regular database strings. If not specified, + the environment variable `NLS_LANG` is used. If the environment variable `NLS_LANG` + is not set, `ASCII` is used. + * **nencoding** - The encoding to use for national character set database strings. + If not specified, the environment variable `NLS_NCHAR` is used. If the environment + variable `NLS_NCHAR` is not used, the environment variable `NLS_LANG` is used instead, + and if the environment variable `NLS_LANG` is not set, `ASCII` is used. + * **threaded** - Whether or not Oracle should wrap accesses to connections with a mutex. + Default value is False. + * **events** - Whether or not to initialize Oracle in events mode. + * **mode** - one of `sysdba`, `sysasm`, `sysoper`, `sysbkp`, `sysdgd`, `syskmt` or `sysrac` + which are defined at the module level, Default mode is connecting. + * **purity** - one of `new`, `self`, `default`. Specify the session acquired from the pool. + configuration parameter. + + More details on all Oracle connect parameters supported can be found in + `cx_Oracle documentation `_. + + Example "extras" field: + + .. code-block:: json + + { + "encoding": "UTF-8", + "nencoding": "UTF-8", + "threaded": false, + "events": false, + "mode": "sysdba", + "purity": "new" + } + + When specifying the connection as URI (in AIRFLOW_CONN_* variable) you should specify it + following the standard syntax of DB connections, where extras are passed as parameters + of the URI (note that all components of the URI should be URL-encoded). + + For example: + + .. 
code-block:: bash + + oracle://oracle_user:XXXXXXXXXXXX@1.1.1.1:1521?encoding=UTF-8&nencoding=UTF-8&threaded=False&events=False&mode=sysdba&purity=new + Cloudsql ~~~~~~~~ The gcpcloudsql:// connection is used by diff --git a/tests/hooks/test_oracle_hook.py b/tests/hooks/test_oracle_hook.py new file mode 100644 index 0000000000000..982ba2b17089e --- /dev/null +++ b/tests/hooks/test_oracle_hook.py @@ -0,0 +1,242 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import json +import unittest +from datetime import datetime + +try: + import cx_Oracle +except ImportError: + cx_Oracle = None +import mock +import numpy + +from airflow.hooks.oracle_hook import OracleHook +from airflow.models.connection import Connection + + +@unittest.skipIf(cx_Oracle is None, 'cx_Oracle package not present') +class TestOracleHookConn(unittest.TestCase): + + def setUp(self): + super(TestOracleHookConn, self).setUp() + + self.connection = Connection( + login='login', + password='password', + host='host', + port=1521 + ) + + self.db_hook = OracleHook() + self.db_hook.get_connection = mock.Mock() + self.db_hook.get_connection.return_value = self.connection + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_host(self, mock_connect): + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['user'], 'login') + self.assertEqual(kwargs['password'], 'password') + self.assertEqual(kwargs['dsn'], 'host') + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_sid(self, mock_connect): + dsn_sid = {'dsn': 'dsn', 'sid': 'sid'} + self.connection.extra = json.dumps(dsn_sid) + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['dsn'], + cx_Oracle.makedsn(dsn_sid['dsn'], + self.connection.port, dsn_sid['sid'])) + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_service_name(self, mock_connect): + dsn_service_name = {'dsn': 'dsn', 'service_name': 'service_name'} + self.connection.extra = json.dumps(dsn_service_name) + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['dsn'], cx_Oracle.makedsn( + dsn_service_name['dsn'], self.connection.port, + 
service_name=dsn_service_name['service_name'])) + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_encoding_without_nencoding(self, mock_connect): + self.connection.extra = json.dumps({'encoding': 'UTF-8'}) + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['encoding'], 'UTF-8') + self.assertEqual(kwargs['nencoding'], 'UTF-8') + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_encoding_with_nencoding(self, mock_connect): + self.connection.extra = json.dumps({'encoding': 'UTF-8', 'nencoding': 'gb2312'}) + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['encoding'], 'UTF-8') + self.assertEqual(kwargs['nencoding'], 'gb2312') + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_nencoding(self, mock_connect): + self.connection.extra = json.dumps({'nencoding': 'UTF-8'}) + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertNotIn('encoding', kwargs) + self.assertEqual(kwargs['nencoding'], 'UTF-8') + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_mode(self, mock_connect): + mode = { + 'sysdba': cx_Oracle.SYSDBA, + 'sysasm': cx_Oracle.SYSASM, + 'sysoper': cx_Oracle.SYSOPER, + 'sysbkp': cx_Oracle.SYSBKP, + 'sysdgd': cx_Oracle.SYSDGD, + 'syskmt': cx_Oracle.SYSKMT, + } + first = True + for m in mode: + self.connection.extra = json.dumps({'mode': m}) + self.db_hook.get_conn() + if first: + mock_connect.assert_called_once() + first = False + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['mode'], mode.get(m)) + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_threaded(self, 
mock_connect): + self.connection.extra = json.dumps({'threaded': True}) + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['threaded'], True) + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_events(self, mock_connect): + self.connection.extra = json.dumps({'events': True}) + self.db_hook.get_conn() + mock_connect.assert_called_once() + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['events'], True) + + @mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect') + def test_get_conn_purity(self, mock_connect): + purity = { + 'new': cx_Oracle.ATTR_PURITY_NEW, + 'self': cx_Oracle.ATTR_PURITY_SELF, + 'default': cx_Oracle.ATTR_PURITY_DEFAULT + } + first = True + for p in purity: + self.connection.extra = json.dumps({'purity': p}) + self.db_hook.get_conn() + if first: + mock_connect.assert_called_once() + first = False + args, kwargs = mock_connect.call_args + self.assertEqual(args, ()) + self.assertEqual(kwargs['purity'], purity.get(p)) + + +@unittest.skipIf(cx_Oracle is None, 'cx_Oracle package not present') +class TestOracleHook(unittest.TestCase): + def setUp(self): + super(TestOracleHook, self).setUp() + + self.cur = mock.MagicMock() + self.conn = mock.MagicMock() + self.conn.cursor.return_value = self.cur + conn = self.conn + + class UnitTestOracleHook(OracleHook): + conn_name_attr = 'test_conn_id' + + def get_conn(self): + return conn + + self.db_hook = UnitTestOracleHook() + + def test_run_without_parameters(self): + sql = 'SQL' + self.db_hook.run(sql) + self.cur.execute.assert_called_once_with(sql) + self.conn.commit.assert_called() + + def test_run_with_parameters(self): + sql = 'SQL' + param = ('p1', 'p2') + self.db_hook.run(sql, parameters=param) + self.cur.execute.assert_called_once_with(sql, param) + self.conn.commit.assert_called() + + def test_insert_rows_with_fields(self): 
+ rows = [("'basestr_with_quote", None, numpy.NAN, + numpy.datetime64('2019-01-24T01:02:03'), + datetime(2019, 1, 24), 1, 10.24, 'str')] + target_fields = ['basestring', 'none', 'numpy_nan', 'numpy_datetime64', + 'datetime', 'int', 'float', 'str'] + self.db_hook.insert_rows('table', rows, target_fields) + self.cur.execute.assert_called_once_with( + "INSERT /*+ APPEND */ INTO table " + "(basestring, none, numpy_nan, numpy_datetime64, datetime, int, float, str) " + "VALUES ('''basestr_with_quote',NULL,NULL,'2019-01-24T01:02:03'," + "to_date('2019-01-24 00:00:00','YYYY-MM-DD HH24:MI:SS'),1,10.24,'str')") + + def test_insert_rows_without_fields(self): + rows = [("'basestr_with_quote", None, numpy.NAN, + numpy.datetime64('2019-01-24T01:02:03'), + datetime(2019, 1, 24), 1, 10.24, 'str')] + self.db_hook.insert_rows('table', rows) + self.cur.execute.assert_called_once_with( + "INSERT /*+ APPEND */ INTO table " + " VALUES ('''basestr_with_quote',NULL,NULL,'2019-01-24T01:02:03'," + "to_date('2019-01-24 00:00:00','YYYY-MM-DD HH24:MI:SS'),1,10.24,'str')") + + def test_bulk_insert_rows_with_fields(self): + rows = [(1, 2, 3), (4, 5, 6), (7, 8, 9)] + target_fields = ['col1', 'col2', 'col3'] + self.db_hook.bulk_insert_rows('table', rows, target_fields) + self.cur.prepare.assert_called_once_with( + "insert into table (col1, col2, col3) values (:1, :2, :3)") + self.cur.executemany.assert_called_once_with(None, rows) + + def test_bulk_insert_rows_with_commit_every(self): + rows = [(1, 2, 3), (4, 5, 6), (7, 8, 9)] + target_fields = ['col1', 'col2', 'col3'] + self.db_hook.bulk_insert_rows('table', rows, target_fields, commit_every=2) + self.cur.prepare.assert_called_with( + "insert into table (col1, col2, col3) values (:1, :2, :3)") + self.cur.executemany.assert_called_with(None, rows[2:]) From 88e3e721d4c8295edad9e328f64110ff01d772d5 Mon Sep 17 00:00:00 2001 From: Dane Laban Date: Sat, 23 Feb 2019 10:35:00 +1100 Subject: [PATCH 0128/1104] [AIRFLOW-3924] Fix try number in alert emails 
(#4741) Alert emails sent via email_alert() have the correct try number in the body of the text. Add a test to ensure the first email sent says `Try 1`. --- airflow/models/__init__.py | 4 +++- tests/models.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 90b184249174d..e9e2dfd9c2660 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -1718,10 +1718,12 @@ def render_templates(self): def email_alert(self, exception): exception_html = str(exception).replace('\n', '
') jinja_context = self.get_template_context() + # This function is called after changing the state + # from State.RUNNING so need to subtract 1 from self.try_number. jinja_context.update(dict( exception=exception, exception_html=exception_html, - try_number=self.try_number, + try_number=self.try_number - 1, max_tries=self.max_tries)) jinja_env = self.task.get_template_env() diff --git a/tests/models.py b/tests/models.py index 364a9e78dbd70..69980048730c9 100644 --- a/tests/models.py +++ b/tests/models.py @@ -2819,6 +2819,7 @@ def test_email_alert(self, mock_send_email): self.assertEqual(email, 'to') self.assertIn('test_email_alert', title) self.assertIn('test_email_alert', body) + self.assertIn('Try 1', body) @patch('airflow.models.send_email') def test_email_alert_with_config(self, mock_send_email): From 36b38d83e0cd409f47c057545d76d32b2a03916b Mon Sep 17 00:00:00 2001 From: Ryan Yuan Date: Sat, 23 Feb 2019 15:41:05 +1100 Subject: [PATCH 0129/1104] [AIRFLOW-3896] Add running command logging back to SSHOperator (#4716) * [AIRFLOW-3896] Add running command logging back to SSHOperator Add the SSH command logging back to SSHOperator * [AIRFLOW-3896] Add running command logging back to SSHOperator Change self.logger to self.log --- airflow/contrib/operators/ssh_operator.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/airflow/contrib/operators/ssh_operator.py b/airflow/contrib/operators/ssh_operator.py index 6e9adc1e5af18..a285914625a23 100644 --- a/airflow/contrib/operators/ssh_operator.py +++ b/airflow/contrib/operators/ssh_operator.py @@ -95,6 +95,8 @@ def execute(self, context): if self.command.startswith('sudo'): get_pty = True + self.log.info("Running command: %s", self.command) + # set timeout taken as params stdin, stdout, stderr = ssh_client.exec_command(command=self.command, get_pty=get_pty, From 4e88726a8d5de2b71057f9906a9bd6f2747ae6f1 Mon Sep 17 00:00:00 2001 From: Joshua Carp Date: Sat, 23 Feb 2019 11:55:34 -0500 Subject: [PATCH 0130/1104] 
[AIRFLOW-3932] Optionally skip dag discovery heuristic. (#4746) --- UPDATING.md | 6 ++++++ airflow/config_templates/default_airflow.cfg | 4 ++++ airflow/models/__init__.py | 5 ++++- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/UPDATING.md b/UPDATING.md index 512b66bbd967e..45674a6f71c4d 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -24,6 +24,12 @@ assists users migrating to a new version. ## Airflow Master +### New `dag_discovery_safe_mode` config option + +If `dag_discovery_safe_mode` is enabled, only check files for DAGs if +they contain the strings "airflow" and "DAG". For backwards +compatibility, this option is enabled by default. + ### Removed deprecated import mechanism The deprecated import mechanism has been removed so the import of modules becomes more consistent and explicit. diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg index fff32a6694942..81e0c41c4508e 100644 --- a/airflow/config_templates/default_airflow.cfg +++ b/airflow/config_templates/default_airflow.cfg @@ -182,6 +182,10 @@ dag_run_conf_overrides_params = False # Worker initialisation check to validate Metadata Database connection worker_precheck = False +# When discovering DAGs, ignore any files that don't contain the strings `DAG` and `airflow`. +dag_discovery_safe_mode = True + + [cli] # In what way should the cli access the API. 
The LocalClient will use the # database directly, while the json_client will use the api running on the diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index e9e2dfd9c2660..1161066288a21 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -559,7 +559,10 @@ def collect_dags( stats = [] FileLoadStat = namedtuple( 'FileLoadStat', "file duration dag_num task_num dags") - for filepath in list_py_file_paths(dag_folder, include_examples=include_examples): + + safe_mode = configuration.conf.getboolean('core', 'dag_discovery_safe_mode') + for filepath in list_py_file_paths(dag_folder, safe_mode=safe_mode, + include_examples=include_examples): try: ts = timezone.utcnow() found_dags = self.process_file( From e36bdef0b34c16def20ecbb8248950070eb5fa33 Mon Sep 17 00:00:00 2001 From: Bolke de Bruin Date: Tue, 15 Jan 2019 20:37:08 +0100 Subject: [PATCH 0131/1104] [AIRFLOW-3697] Vendorize nvd3 and slugify nvd3 has a dependency on python-slugify which pulls in a GPL dependency by default, which we don't want. 
This commit brings in nvd3 0.15.0 and slugify 2.0.1 WITH NO CHANGES - those will come in the next commit --- .flake8 | 1 + .rat-excludes | 3 + .travis.yml | 1 - LICENSE | 2 + MANIFEST.in | 3 +- NOTICE | 12 + airflow/_vendor/README | 13 + airflow/_vendor/__init__.py | 18 + airflow/_vendor/nvd3/LICENSE | 24 + airflow/_vendor/nvd3/NVD3Chart.py | 505 ++++++++++++++++++ airflow/_vendor/nvd3/__init__.py | 29 + airflow/_vendor/nvd3/cumulativeLineChart.py | 104 ++++ airflow/_vendor/nvd3/discreteBarChart.py | 91 ++++ airflow/_vendor/nvd3/ipynb.py | 91 ++++ airflow/_vendor/nvd3/lineChart.py | 120 +++++ airflow/_vendor/nvd3/linePlusBarChart.py | 131 +++++ airflow/_vendor/nvd3/lineWithFocusChart.py | 105 ++++ airflow/_vendor/nvd3/multiBarChart.py | 95 ++++ .../_vendor/nvd3/multiBarHorizontalChart.py | 100 ++++ airflow/_vendor/nvd3/pieChart.py | 101 ++++ airflow/_vendor/nvd3/scatterChart.py | 121 +++++ airflow/_vendor/nvd3/stackedAreaChart.py | 99 ++++ airflow/_vendor/nvd3/templates/base.html | 35 ++ airflow/_vendor/nvd3/templates/content.html | 123 +++++ .../nvd3/templates/cumulativelinechart.html | 11 + .../nvd3/templates/discretebarchart.html | 31 ++ .../nvd3/templates/linebarwfocuschart.html | 60 +++ airflow/_vendor/nvd3/templates/linechart.html | 47 ++ .../nvd3/templates/lineplusbarchart.html | 44 ++ .../nvd3/templates/linewfocuschart.html | 10 + .../_vendor/nvd3/templates/multibarchart.html | 10 + .../templates/multibarcharthorizontal.html | 10 + airflow/_vendor/nvd3/templates/page.html | 12 + airflow/_vendor/nvd3/templates/piechart.html | 80 +++ .../_vendor/nvd3/templates/scatterchart.html | 52 ++ .../nvd3/templates/stackedareachart.html | 7 + airflow/_vendor/nvd3/translator.py | 71 +++ airflow/_vendor/slugify/LICENSE | 21 + airflow/_vendor/slugify/__init__.py | 6 + airflow/_vendor/slugify/slugify.py | 188 +++++++ licenses/LICENSE-python-nvd3.txt | 24 + licenses/LICENSE-python-slugify.txt | 21 + 42 files changed, 2630 insertions(+), 2 deletions(-) create mode 100644 
airflow/_vendor/README create mode 100644 airflow/_vendor/__init__.py create mode 100644 airflow/_vendor/nvd3/LICENSE create mode 100644 airflow/_vendor/nvd3/NVD3Chart.py create mode 100755 airflow/_vendor/nvd3/__init__.py create mode 100644 airflow/_vendor/nvd3/cumulativeLineChart.py create mode 100644 airflow/_vendor/nvd3/discreteBarChart.py create mode 100644 airflow/_vendor/nvd3/ipynb.py create mode 100644 airflow/_vendor/nvd3/lineChart.py create mode 100644 airflow/_vendor/nvd3/linePlusBarChart.py create mode 100644 airflow/_vendor/nvd3/lineWithFocusChart.py create mode 100644 airflow/_vendor/nvd3/multiBarChart.py create mode 100644 airflow/_vendor/nvd3/multiBarHorizontalChart.py create mode 100644 airflow/_vendor/nvd3/pieChart.py create mode 100644 airflow/_vendor/nvd3/scatterChart.py create mode 100644 airflow/_vendor/nvd3/stackedAreaChart.py create mode 100644 airflow/_vendor/nvd3/templates/base.html create mode 100644 airflow/_vendor/nvd3/templates/content.html create mode 100644 airflow/_vendor/nvd3/templates/cumulativelinechart.html create mode 100644 airflow/_vendor/nvd3/templates/discretebarchart.html create mode 100644 airflow/_vendor/nvd3/templates/linebarwfocuschart.html create mode 100644 airflow/_vendor/nvd3/templates/linechart.html create mode 100644 airflow/_vendor/nvd3/templates/lineplusbarchart.html create mode 100644 airflow/_vendor/nvd3/templates/linewfocuschart.html create mode 100644 airflow/_vendor/nvd3/templates/multibarchart.html create mode 100644 airflow/_vendor/nvd3/templates/multibarcharthorizontal.html create mode 100644 airflow/_vendor/nvd3/templates/page.html create mode 100644 airflow/_vendor/nvd3/templates/piechart.html create mode 100644 airflow/_vendor/nvd3/templates/scatterchart.html create mode 100644 airflow/_vendor/nvd3/templates/stackedareachart.html create mode 100644 airflow/_vendor/nvd3/translator.py create mode 100644 airflow/_vendor/slugify/LICENSE create mode 100644 airflow/_vendor/slugify/__init__.py create mode 
100644 airflow/_vendor/slugify/slugify.py create mode 100644 licenses/LICENSE-python-nvd3.txt create mode 100644 licenses/LICENSE-python-slugify.txt diff --git a/.flake8 b/.flake8 index e2ba4cbf6f631..368fdb4331a12 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,4 @@ [flake8] max-line-length = 110 ignore = E731,W504 +exclude = .svn,CVS,.bzr,.hg,.git,__pycache__,.tox,.eggs,*.egg,*/_vendor/* diff --git a/.rat-excludes b/.rat-excludes index 786920076dd57..fb9361f7f39fc 100644 --- a/.rat-excludes +++ b/.rat-excludes @@ -59,3 +59,6 @@ coverage.xml rat-results.txt apache-airflow-.*\+source.tar.gz.* apache-airflow-.*\+bin.tar.gz.* + +# vendored modules +_vendor/* diff --git a/.travis.yml b/.travis.yml index 59e87d7fb192d..68b8f61e5fe1f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,7 +24,6 @@ python: env: global: - DOCKER_COMPOSE_VERSION=1.20.0 - - SLUGIFY_USES_TEXT_UNIDECODE=yes - TRAVIS_CACHE=$HOME/.travis_cache/ matrix: - TOX_ENV=py27-backend_mysql-env_docker diff --git a/LICENSE b/LICENSE index debddc0d7bb79..e3335acb8014a 100644 --- a/LICENSE +++ b/LICENSE @@ -240,6 +240,8 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
(MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/) (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock) (MIT License) MomentJS v2.22.2 (http://momentjs.com/) + (MIT License) python-slugify v2.0.1 (https://github.com/un33k/python-slugify) + (MIT License) python-nvd3 v0.15.0 (https://github.com/areski/python-nvd3) ======================================================================== BSD 2-Clause licenses diff --git a/MANIFEST.in b/MANIFEST.in index 2ae1b2a434680..755f1548c8f07 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -24,9 +24,10 @@ graft licenses/ graft airflow/www graft airflow/www/static graft airflow/www/templates -graft airflow/www/translations +graft airflow/_vendor/ include airflow/alembic.ini graft scripts/systemd graft scripts/upstart graft airflow/config_templates recursive-exclude airflow/www/node_modules * +global-exclude __pycache__ *.pyc diff --git a/NOTICE b/NOTICE index b478303ed644d..130353a3b8b75 100644 --- a/NOTICE +++ b/NOTICE @@ -17,3 +17,15 @@ This product contains a modified portion of 'Hue' developed by Cloudera, Inc. (https://github.com/cloudera/hue/). * Copyright 2009-2017 Cloudera Inc. + +python-slugify: +--------------- + +* Copyright (c) Val Neekman @ Neekware Inc. 
http://neekware.com + +python-nvd3: +------------ + +* Copyright (c) 2013 Arezqui Belaid and other contributors + + diff --git a/airflow/_vendor/README b/airflow/_vendor/README new file mode 100644 index 0000000000000..a79ea89eae536 --- /dev/null +++ b/airflow/_vendor/README @@ -0,0 +1,13 @@ +Original files in this directory were created with the following commands:: + + mkdir -p slugify/ + curl -fsSL -O https://files.pythonhosted.org/packages/1f/9c/8b07d625e9c9df567986d887f0375075abb1923e49d074a7803cd1527dae/python-slugify-2.0.1.tar.gz + tar -xzf python-slugify-*.tar.gz --strip-components=2 -C slugify/ '*/slugify/*' + tar -xzf python-slugify-*.tar.gz --strip-components=1 -C slugify/ '*/LICENSE' + rm *.tar.gz + + mkdir -p nvd3/ + curl -fsSL -O https://files.pythonhosted.org/packages/0b/aa/97165daa6e319409c5c2582e62736a7353bda3c90d90fdcb0b11e116dd2d/python-nvd3-0.15.0.tar.gz + tar -xzf python-nvd3-*.tar.gz --strip-components=2 -C nvd3/ '*/nvd3/*' + tar -xzf python-nvd3-*.tar.gz --strip-components=1 -C nvd3/ '*/LICENSE' + rm *.tar.gz diff --git a/airflow/_vendor/__init__.py b/airflow/_vendor/__init__.py new file mode 100644 index 0000000000000..114d189da14ab --- /dev/null +++ b/airflow/_vendor/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/_vendor/nvd3/LICENSE b/airflow/_vendor/nvd3/LICENSE new file mode 100644 index 0000000000000..1add6249e57b4 --- /dev/null +++ b/airflow/_vendor/nvd3/LICENSE @@ -0,0 +1,24 @@ +The MIT License (MIT) + +Python-nvd3 + +Copyright (c) 2013 Arezqui Belaid and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/airflow/_vendor/nvd3/NVD3Chart.py b/airflow/_vendor/nvd3/NVD3Chart.py new file mode 100644 index 0000000000000..666993638de65 --- /dev/null +++ b/airflow/_vendor/nvd3/NVD3Chart.py @@ -0,0 +1,505 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. 
+ +Project location : https://github.com/areski/python-nvd3 +""" + +from __future__ import unicode_literals +from optparse import OptionParser +from jinja2 import Environment, PackageLoader +from slugify import slugify +try: + import simplejson as json +except ImportError: + import json + +CONTENT_FILENAME = "./content.html" +PAGE_FILENAME = "./page.html" + + +pl = PackageLoader('nvd3', 'templates') +jinja2_env = Environment(lstrip_blocks=True, trim_blocks=True, loader=pl) + +template_content = jinja2_env.get_template(CONTENT_FILENAME) +template_page = jinja2_env.get_template(PAGE_FILENAME) + + +def stab(tab=1): + """ + create space tabulation + """ + return ' ' * 4 * tab + + +class NVD3Chart(object): + """ + NVD3Chart Base class. + """ + #: chart count + count = 0 + #: directory holding the assets (bower_components) + assets_directory = './bower_components/' + + # this attribute is overriden by children of this + # class + CHART_FILENAME = None + template_environment = Environment(lstrip_blocks=True, trim_blocks=True, + loader=pl) + + def __init__(self, **kwargs): + """ + This is the base class for all the charts. The following keywords are + accepted: + + :keyword: **display_container** - default: ``True`` + :keyword: **jquery_on_ready** - default: ``False`` + :keyword: **charttooltip_dateformat** - default: ``'%d %b %Y'`` + :keyword: **name** - default: the class name + ``model`` - set the model (e.g. ``pieChart``, ` + ``LineWithFocusChart``, ``MultiBarChart``). + :keyword: **color_category** - default - ``None`` + :keyword: **color_list** - default - ``None`` + used by pieChart (e.g. 
``['red', 'blue', 'orange']``) + :keyword: **margin_bottom** - default - ``20`` + :keyword: **margin_left** - default - ``60`` + :keyword: **margin_right** - default - ``60`` + :keyword: **margin_top** - default - ``30`` + :keyword: **height** - default - ``''`` + :keyword: **width** - default - ``''`` + :keyword: **stacked** - default - ``False`` + :keyword: **focus_enable** - default - ``False`` + :keyword: **resize** - define - ``False`` + :keyword: **show_legend** - default - ``True`` + :keyword: **show_labels** - default - ``True`` + :keyword: **tag_script_js** - default - ``True`` + :keyword: **use_interactive_guideline** - default - ``False`` + :keyword: **chart_attr** - default - ``None`` + :keyword: **extras** - default - ``None`` + + Extra chart modifiers. Use this to modify different attributes of + the chart. + :keyword: **x_axis_date** - default - False + Signal that x axis is a date axis + :keyword: **date_format** - default - ``%x`` + see https://github.com/mbostock/d3/wiki/Time-Formatting + :keyword: **x_axis_format** - default - ``''``. + :keyword: **y_axis_format** - default - ``''``. + :keyword: **style** - default - ``''`` + Style modifiers for the DIV container. + :keyword: **color_category** - default - ``category10`` + + Acceptable values are nvd3 categories such as + ``category10``, ``category20``, ``category20c``. 
+ """ + # set the model + self.model = self.__class__.__name__ #: The chart model, + + #: an Instance of Jinja2 template + self.template_page_nvd3 = template_page + self.template_content_nvd3 = template_content + self.series = [] + self.axislist = {} + # accepted keywords + self.display_container = kwargs.get('display_container', True) + self.charttooltip_dateformat = kwargs.get('charttooltip_dateformat', + '%d %b %Y') + self._slugify_name(kwargs.get('name', self.model)) + self.jquery_on_ready = kwargs.get('jquery_on_ready', False) + self.color_category = kwargs.get('color_category', None) + self.color_list = kwargs.get('color_list', None) + self.margin_bottom = kwargs.get('margin_bottom', 20) + self.margin_left = kwargs.get('margin_left', 60) + self.margin_right = kwargs.get('margin_right', 60) + self.margin_top = kwargs.get('margin_top', 30) + self.height = kwargs.get('height', '') + self.width = kwargs.get('width', '') + self.stacked = kwargs.get('stacked', False) + self.focus_enable = kwargs.get('focus_enable', False) + self.resize = kwargs.get('resize', False) + self.show_legend = kwargs.get('show_legend', True) + self.show_labels = kwargs.get('show_labels', True) + self.tag_script_js = kwargs.get('tag_script_js', True) + self.use_interactive_guideline = kwargs.get("use_interactive_guideline", + False) + self.chart_attr = kwargs.get("chart_attr", {}) + self.extras = kwargs.get('extras', None) + self.style = kwargs.get('style', '') + self.date_format = kwargs.get('date_format', '%x') + self.x_axis_date = kwargs.get('x_axis_date', False) + #: x-axis contain date format or not + # possible duplicate of x_axis_date + self.date_flag = kwargs.get('date_flag', False) + self.x_axis_format = kwargs.get('x_axis_format', '') + # Load remote JS assets or use the local bower assets? 
+ self.remote_js_assets = kwargs.get('remote_js_assets', True) + + # None keywords attribute that should be modified by methods + # We should change all these to _attr + + self.htmlcontent = '' #: written by buildhtml + self.htmlheader = '' + #: Place holder for the graph (the HTML div) + #: Written by ``buildcontainer`` + self.container = u'' + #: Header for javascript code + self.containerheader = u'' + # CDN http://cdnjs.com/libraries/nvd3/ needs to make sure it's up to + # date + self.header_css = [ + '' % h for h in + ( + 'https://cdnjs.cloudflare.com/ajax/libs/nvd3/1.7.0/nv.d3.min.css' if self.remote_js_assets else self.assets_directory + 'nvd3/src/nv.d3.css', + ) + ] + + self.header_js = [ + '' % h for h in + ( + 'https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js' if self.remote_js_assets else self.assets_directory + 'd3/d3.min.js', + 'https://cdnjs.cloudflare.com/ajax/libs/nvd3/1.7.0/nv.d3.min.js' if self.remote_js_assets else self.assets_directory + 'nvd3/nv.d3.min.js' + ) + ] + + #: Javascript code as string + self.jschart = None + self.custom_tooltip_flag = False + self.tooltip_condition_string = '' + self.charttooltip = '' + self.serie_no = 1 + + def _slugify_name(self, name): + """Slufigy name with underscore""" + self.name = slugify(name, separator='_') + + def add_serie(self, y, x, name=None, extra=None, **kwargs): + """ + add serie - Series are list of data that will be plotted + y {1, 2, 3, 4, 5} / x {1, 2, 3, 4, 5} + + **Attributes**: + + * ``name`` - set Serie name + * ``x`` - x-axis data + * ``y`` - y-axis data + + kwargs: + + * ``shape`` - for scatterChart, you can set different shapes + (circle, triangle etc...) 
+ * ``size`` - for scatterChart, you can set size of different shapes + * ``type`` - for multiChart, type should be bar + * ``bar`` - to display bars in Chart + * ``color_list`` - define list of colors which will be + used by pieChart + * ``color`` - set axis color + * ``disabled`` - + + extra: + + * ``tooltip`` - set tooltip flag + * ``date_format`` - set date_format for tooltip if x-axis is in + date format + + """ + if not name: + name = "Serie %d" % (self.serie_no) + + # For scatterChart shape & size fields are added in serie + if 'shape' in kwargs or 'size' in kwargs: + csize = kwargs.get('size', 1) + cshape = kwargs.get('shape', 'circle') + + serie = [{ + 'x': x[i], + 'y': j, + 'shape': cshape, + 'size': csize[i] if isinstance(csize, list) else csize + } for i, j in enumerate(y)] + else: + if self.model == 'pieChart': + serie = [{'label': x[i], 'value': y} for i, y in enumerate(y)] + else: + serie = [{'x': x[i], 'y': y} for i, y in enumerate(y)] + + data_keyvalue = {'values': serie, 'key': name} + + # multiChart + # Histogram type='bar' for the series + if 'type' in kwargs and kwargs['type']: + data_keyvalue['type'] = kwargs['type'] + + # Define on which Y axis the serie is related + # a chart can have 2 Y axis, left and right, by default only one Y Axis is used + if 'yaxis' in kwargs and kwargs['yaxis']: + data_keyvalue['yAxis'] = kwargs['yaxis'] + else: + if self.model != 'pieChart': + data_keyvalue['yAxis'] = '1' + + if 'bar' in kwargs and kwargs['bar']: + data_keyvalue['bar'] = 'true' + + if 'disabled' in kwargs and kwargs['disabled']: + data_keyvalue['disabled'] = 'true' + + if 'color' in kwargs and kwargs['color']: + data_keyvalue['color'] = kwargs['color'] + + if extra: + if self.model == 'pieChart': + if 'color_list' in extra and extra['color_list']: + self.color_list = extra['color_list'] + + if extra.get('date_format'): + self.charttooltip_dateformat = extra['date_format'] + + if extra.get('tooltip'): + self.custom_tooltip_flag = True + + if 
self.model != 'pieChart': + _start = extra['tooltip']['y_start'] + _end = extra['tooltip']['y_end'] + _start = ("'" + str(_start) + "' + ") if _start else '' + _end = (" + '" + str(_end) + "'") if _end else '' + + if self.model == 'linePlusBarChart': + if self.tooltip_condition_string: + self.tooltip_condition_string += stab(5) + self.tooltip_condition_string += stab(0) + "if(key.indexOf('" + name + "') > -1 ){\n" +\ + stab(6) + "var y = " + _start + " String(graph.point.y) " + _end + ";\n" +\ + stab(5) + "}\n" + elif self.model == 'cumulativeLineChart': + self.tooltip_condition_string += stab(0) + "if(key == '" + name + "'){\n" +\ + stab(6) + "var y = " + _start + " String(e) " + _end + ";\n" +\ + stab(5) + "}\n" + else: + self.tooltip_condition_string += stab(5) + "if(key == '" + name + "'){\n" +\ + stab(6) + "var y = " + _start + " String(graph.point.y) " + _end + ";\n" +\ + stab(5) + "}\n" + + if self.model == 'pieChart': + _start = extra['tooltip']['y_start'] + _end = extra['tooltip']['y_end'] + _start = ("'" + str(_start) + "' + ") if _start else '' + _end = (" + '" + str(_end) + "'") if _end else '' + self.tooltip_condition_string += "var y = " + _start + " String(y) " + _end + ";\n" + + # Increment series counter & append + self.serie_no += 1 + self.series.append(data_keyvalue) + + def add_chart_extras(self, extras): + """ + Use this method to add extra d3 properties to your chart. 
+ For example, you want to change the text color of the graph:: + + chart = pieChart(name='pieChart', color_category='category20c', height=400, width=400) + + xdata = ["Orange", "Banana", "Pear", "Kiwi", "Apple", "Strawberry", "Pineapple"] + ydata = [3, 4, 0, 1, 5, 7, 3] + + extra_serie = {"tooltip": {"y_start": "", "y_end": " cal"}} + chart.add_serie(y=ydata, x=xdata, extra=extra_serie) + + The above code will create graph with a black text, the following will change it:: + + text_white="d3.selectAll('#pieChart text').style('fill', 'white');" + chart.add_chart_extras(text_white) + + The above extras will be appended to the java script generated. + + Alternatively, you can use the following initialization:: + + chart = pieChart(name='pieChart', + color_category='category20c', + height=400, width=400, + extras=text_white) + """ + self.extras = extras + + def set_graph_height(self, height): + """Set Graph height""" + self.height = str(height) + + def set_graph_width(self, width): + """Set Graph width""" + self.width = str(width) + + def set_containerheader(self, containerheader): + """Set containerheader""" + self.containerheader = containerheader + + def set_date_flag(self, date_flag=False): + """Set date flag""" + self.date_flag = date_flag + + def set_custom_tooltip_flag(self, custom_tooltip_flag): + """Set custom_tooltip_flag & date_flag""" + self.custom_tooltip_flag = custom_tooltip_flag + + def __str__(self): + """return htmlcontent""" + self.buildhtml() + return self.htmlcontent + + def buildcontent(self): + """Build HTML content only, no header or body tags. 
To be useful this + will usually require the attribute `juqery_on_ready` to be set which + will wrap the js in $(function(){};) + """ + self.buildcontainer() + # if the subclass has a method buildjs this method will be + # called instead of the method defined here + # when this subclass method is entered it does call + # the method buildjschart defined here + self.buildjschart() + self.htmlcontent = self.template_content_nvd3.render(chart=self) + + def buildhtml(self): + """Build the HTML page + Create the htmlheader with css / js + Create html page + Add Js code for nvd3 + """ + self.buildcontent() + self.content = self.htmlcontent + self.htmlcontent = self.template_page_nvd3.render(chart=self) + + # this is used by django-nvd3 + def buildhtmlheader(self): + """generate HTML header content""" + self.htmlheader = '' + # If the JavaScript assets have already been injected, don't bother re-sourcing them. + global _js_initialized + if '_js_initialized' not in globals() or not _js_initialized: + for css in self.header_css: + self.htmlheader += css + for js in self.header_js: + self.htmlheader += js + + def buildcontainer(self): + """generate HTML div""" + if self.container: + return + + # Create SVG div with style + if self.width: + if self.width[-1] != '%': + self.style += 'width:%spx;' % self.width + else: + self.style += 'width:%s;' % self.width + if self.height: + if self.height[-1] != '%': + self.style += 'height:%spx;' % self.height + else: + self.style += 'height:%s;' % self.height + if self.style: + self.style = 'style="%s"' % self.style + + self.container = self.containerheader + \ + '
\n' % (self.name, self.style) + + def buildjschart(self): + """generate javascript code for the chart""" + self.jschart = '' + + # add custom tooltip string in jschart + # default condition (if build_custom_tooltip is not called explicitly with date_flag=True) + if self.tooltip_condition_string == '': + self.tooltip_condition_string = 'var y = String(graph.point.y);\n' + + # Include data + self.series_js = json.dumps(self.series) + + def create_x_axis(self, name, label=None, format=None, date=False, custom_format=False): + """Create X-axis""" + axis = {} + if custom_format and format: + axis['tickFormat'] = format + elif format: + if format == 'AM_PM': + axis['tickFormat'] = "function(d) { return get_am_pm(parseInt(d)); }" + else: + axis['tickFormat'] = "d3.format(',%s')" % format + + if label: + axis['axisLabel'] = "'" + label + "'" + + # date format : see https://github.com/mbostock/d3/wiki/Time-Formatting + if date: + self.dateformat = format + axis['tickFormat'] = ("function(d) { return d3.time.format('%s')" + "(new Date(parseInt(d))) }\n" + "" % self.dateformat) + # flag is the x Axis is a date + if name[0] == 'x': + self.x_axis_date = True + + # Add new axis to list of axis + self.axislist[name] = axis + + # Create x2Axis if focus_enable + if name == "xAxis" and self.focus_enable: + self.axislist['x2Axis'] = axis + + def create_y_axis(self, name, label=None, format=None, custom_format=False): + """ + Create Y-axis + """ + axis = {} + + if custom_format and format: + axis['tickFormat'] = format + elif format: + axis['tickFormat'] = "d3.format(',%s')" % format + + if label: + axis['axisLabel'] = "'" + label + "'" + + # Add new axis to list of axis + self.axislist[name] = axis + + +class TemplateMixin(object): + """ + A mixin that override buildcontent. Instead of building the complex + content template we exploit Jinja2 inheritance. 
Thus each chart class + renders it's own chart template which inherits from content.html + """ + def buildcontent(self): + """Build HTML content only, no header or body tags. To be useful this + will usually require the attribute `juqery_on_ready` to be set which + will wrap the js in $(function(){};) + """ + self.buildcontainer() + # if the subclass has a method buildjs this method will be + # called instead of the method defined here + # when this subclass method is entered it does call + # the method buildjschart defined here + self.buildjschart() + self.htmlcontent = self.template_chart_nvd3.render(chart=self) + + +def _main(): + """ + Parse options and process commands + """ + # Parse arguments + usage = "usage: nvd3.py [options]" + parser = OptionParser(usage=usage, + version=("python-nvd3 - Charts generator with " + "nvd3.js and d3.js")) + parser.add_option("-q", "--quiet", + action="store_false", dest="verbose", default=True, + help="don't print messages to stdout") + + (options, args) = parser.parse_args() + + +if __name__ == '__main__': + _main() diff --git a/airflow/_vendor/nvd3/__init__.py b/airflow/_vendor/nvd3/__init__.py new file mode 100755 index 0000000000000..5b737b45361ad --- /dev/null +++ b/airflow/_vendor/nvd3/__init__.py @@ -0,0 +1,29 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. 
+ +Project location : https://github.com/areski/python-nvd3 +""" + +__version__ = '0.15.0' +__all__ = ['lineChart', 'pieChart', 'lineWithFocusChart', + 'stackedAreaChart', 'multiBarHorizontalChart', + 'linePlusBarChart', 'cumulativeLineChart', + 'scatterChart', 'discreteBarChart', 'multiBarChart'] + + +from .lineChart import lineChart +from .pieChart import pieChart +from .lineWithFocusChart import lineWithFocusChart +from .stackedAreaChart import stackedAreaChart +from .multiBarHorizontalChart import multiBarHorizontalChart +from .linePlusBarChart import linePlusBarChart +from .cumulativeLineChart import cumulativeLineChart +from .scatterChart import scatterChart +from .discreteBarChart import discreteBarChart +from .multiBarChart import multiBarChart +from . import ipynb diff --git a/airflow/_vendor/nvd3/cumulativeLineChart.py b/airflow/_vendor/nvd3/cumulativeLineChart.py new file mode 100644 index 0000000000000..d98d0867e4d99 --- /dev/null +++ b/airflow/_vendor/nvd3/cumulativeLineChart.py @@ -0,0 +1,104 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class cumulativeLineChart(TemplateMixin, NVD3Chart): + """ + A cumulative line chart is used when you have one important grouping representing + an ordered set of data and one value to show, summed over time. 
+ + Python example:: + + from nvd3 import cumulativeLineChart + chart = cumulativeLineChart(name='cumulativeLineChart', x_is_date=True) + xdata = [1365026400000000, 1365026500000000, 1365026600000000] + ydata = [6, 5, 1] + y2data = [36, 55, 11] + + extra_serie = {"tooltip": {"y_start": "There are ", "y_end": " calls"}} + chart.add_serie(name="Serie 1", y=ydata, x=xdata, extra=extra_serie) + + extra_serie = {"tooltip": {"y_start": "", "y_end": " mins"}} + chart.add_serie(name="Serie 2", y=y2data, x=xdata, extra=extra_serie) + chart.buildhtml() + + Javascript generated: + + .. raw:: html + +
+ + + """ + + CHART_FILENAME = "./cumulativelinechart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(cumulativeLineChart, self).__init__(**kwargs) + self.model = 'cumulativeLineChart' + + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + + if kwargs.get('x_is_date', False): + self.set_date_flag(True) + self.create_x_axis('xAxis', + format=kwargs.get('x_axis_format', '%d %b %Y'), + date=True) + self.set_custom_tooltip_flag(True) + else: + self.create_x_axis('xAxis', format=kwargs.get( + 'x_axis_format', '.2f')) + + self.create_y_axis('yAxis', format=kwargs.get('y_axis_format', '.1%')) + + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/discreteBarChart.py b/airflow/_vendor/nvd3/discreteBarChart.py new file mode 100644 index 0000000000000..cf6c8a4a8ff4b --- /dev/null +++ b/airflow/_vendor/nvd3/discreteBarChart.py @@ -0,0 +1,91 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class discreteBarChart(TemplateMixin, NVD3Chart): + """ + A discrete bar chart or bar graph is a chart with rectangular bars with + lengths proportional to the values that they represent. + + Python example:: + + from nvd3 import discreteBarChart + chart = discreteBarChart(name='discreteBarChart', height=400, width=400) + + xdata = ["A", "B", "C", "D", "E", "F"] + ydata = [3, 4, 0, -3, 5, 7] + + chart.add_serie(y=ydata, x=xdata) + chart.buildhtml() + + Javascript generated: + + .. raw:: html + +
+ + + + """ + CHART_FILENAME = "./discretebarchart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(discreteBarChart, self).__init__(**kwargs) + self.model = 'discreteBarChart' + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + + if kwargs.get('x_is_date', False): + self.set_date_flag(True) + self.create_x_axis('xAxis', + format=kwargs.get('x_axis_format', + "%d %b %Y %H %S"), + date=True) + else: + self.create_x_axis('xAxis', format=None) + + self.create_y_axis('yAxis', format=kwargs.get('y_axis_format', ".0f")) + + self.set_custom_tooltip_flag(True) + + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/ipynb.py b/airflow/_vendor/nvd3/ipynb.py new file mode 100644 index 0000000000000..f421afc0a8a50 --- /dev/null +++ b/airflow/_vendor/nvd3/ipynb.py @@ -0,0 +1,91 @@ +''' +ipython compatability module for nvd3-python +This adds simple ipython compatibility to the nvd3-python package, without making any +major modifications to how the main package is structured. It utilizes the IPython +display-formatter functionality, as described at: +http://nbviewer.ipython.org/github/ipython/ipython/blob/master/examples/notebooks/Custom%20Display%20Logic.ipynb +For additional examples, see: +https://github.com/sympy/sympy/blob/master/sympy/interactive/printing.py +''' + +try: + _ip = get_ipython() +except: + _ip = None +if _ip and _ip.__module__.lower().startswith('ipy'): + global _js_initialized + _js_initialized = False + + def _print_html(chart): + '''Function to return the HTML code for the div container plus the javascript + to generate the chart. 
This function is bound to the ipython formatter so that + charts are displayed inline.''' + global _js_initialized + if not _js_initialized: + print('js not initialized - pausing to allow time for it to load...') + initialize_javascript() + import time + time.sleep(5) + chart.buildhtml() + return chart.htmlcontent + + def _setup_ipython_formatter(ip): + ''' Set up the ipython formatter to display HTML formatted output inline''' + from IPython import __version__ as IPython_version + from nvd3 import __all__ as nvd3_all + + if IPython_version >= '0.11': + html_formatter = ip.display_formatter.formatters['text/html'] + for chart_type in nvd3_all: + html_formatter.for_type_by_name('nvd3.' + chart_type, chart_type, _print_html) + + def initialize_javascript(d3_js_url='https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js', + nvd3_js_url='https://cdnjs.cloudflare.com/ajax/libs/nvd3/1.7.0/nv.d3.min.js', + nvd3_css_url='https://cdnjs.cloudflare.com/ajax/libs/nvd3/1.7.0/nv.d3.min.css', + use_remote=False): + '''Initialize the ipython notebook to be able to display nvd3 results. + by instructing IPython to load the nvd3 JS and css files, and the d3 JS file. + + by default, it looks for the files in your IPython Notebook working directory. 
+ + Takes the following options: + + use_remote: use remote hosts for d3.js, nvd3.js, and nv.d3.css (default False) + * Note: the following options are ignored if use_remote is False: + nvd3_css_url: location of nvd3 css file (default https://cdnjs.cloudflare.com/ajax/libs/nvd3/1.7.0/nv.d3.min.css) + nvd3_js_url: location of nvd3 javascript file (default https://cdnjs.cloudflare.com/ajax/libs/nvd3/1.7.0/nv.d3.min.css) + d3_js_url: location of d3 javascript file (default https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js) + ''' + from IPython.display import display, Javascript, HTML + + if not use_remote: + # these file locations are for IPython 1.x, and will probably change when 2.x is released + d3_js_url = 'files/d3.v3.js' + nvd3_js_url = 'files/nv.d3.js' + nvd3_css_url = 'files/nv.d3.css' + + # load the required javascript files + + #display(Javascript('''$.getScript("%s")''' %(d3_js_url))) + display(HTML('''''' % (nvd3_css_url))) + # The following two methods for loading the script file are redundant. + # This is intentional. + # Ipython's loading of javscript in version 1.x is a bit squirrely, especially + # when creating demos to view in nbviewer. + # by trying twice, in two different ways (one using jquery and one using plain old + # HTML), we maximize our chances of successfully loading the script. 
+ display(Javascript('''$.getScript("%s")''' % (nvd3_js_url))) + display(Javascript('''$.getScript("%s", function() { + $.getScript("%s", function() {})});''' % (d3_js_url, nvd3_js_url))) + display(HTML('' % (d3_js_url))) + display(HTML('' % (nvd3_js_url))) + + global _js_initialized + _js_initialized = True + + print('loaded nvd3 IPython extension\n' + 'run nvd3.ipynb.initialize_javascript() to set up the notebook\n' + 'help(nvd3.ipynb.initialize_javascript) for options') + + _setup_ipython_formatter(_ip) diff --git a/airflow/_vendor/nvd3/lineChart.py b/airflow/_vendor/nvd3/lineChart.py new file mode 100644 index 0000000000000..c237d069802ad --- /dev/null +++ b/airflow/_vendor/nvd3/lineChart.py @@ -0,0 +1,120 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class lineChart(TemplateMixin, NVD3Chart): + + """ + A line chart or line graph is a type of chart which displays information + as a series of data points connected by straight line segments. + + Python example:: + + from nvd3 import lineChart + chart = lineChart(name="lineChart", x_is_date=False, x_axis_format="AM_PM") + + xdata = range(24) + ydata = [0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 4, 3, 3, 5, 7, 5, 3, 16, 6, 9, 15, 4, 12] + ydata2 = [9, 8, 11, 8, 3, 7, 10, 8, 6, 6, 9, 6, 5, 4, 3, 10, 0, 6, 3, 1, 0, 0, 0, 1] + + extra_serie = {"tooltip": {"y_start": "There are ", "y_end": " calls"}} + chart.add_serie(y=ydata, x=xdata, name='sine', extra=extra_serie, **kwargs1) + extra_serie = {"tooltip": {"y_start": "", "y_end": " min"}} + chart.add_serie(y=ydata2, x=xdata, name='cose', extra=extra_serie, **kwargs2) + chart.buildhtml() + + Javascript renderd to: + + .. raw:: html + +
+ + + See the source code of this page, to see the underlying javascript. + """ + CHART_FILENAME = "./linechart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(lineChart, self).__init__(**kwargs) + self.model = 'lineChart' + + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + + if kwargs.get('x_is_date', False): + self.set_date_flag(True) + self.create_x_axis('xAxis', + format=kwargs.get('x_axis_format', '%d %b %Y'), + date=True) + self.set_custom_tooltip_flag(True) + else: + if kwargs.get('x_axis_format') == 'AM_PM': + self.x_axis_format = format = 'AM_PM' + else: + format = kwargs.get('x_axis_format', 'r') + self.create_x_axis('xAxis', format=format, + custom_format=kwargs.get('x_custom_format', + False)) + self.create_y_axis( + 'yAxis', + format=kwargs.get('y_axis_format', '.02f'), + custom_format=kwargs.get('y_custom_format', False)) + + # must have a specified height, otherwise it superimposes both chars + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/linePlusBarChart.py b/airflow/_vendor/nvd3/linePlusBarChart.py new file mode 100644 index 0000000000000..4eaa5fc6ffdbf --- /dev/null +++ b/airflow/_vendor/nvd3/linePlusBarChart.py @@ -0,0 +1,131 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class linePlusBarChart(TemplateMixin, NVD3Chart): + + """ + A linePlusBarChart Chart is a type of chart which displays information + as a series of data points connected by straight line segments + and with some series with rectangular bars with lengths proportional + to the values that they represent. 
+ + Python example:: + + from nvd3 import linePlusBarChart + chart = linePlusBarChart(name="linePlusBarChart", + width=500, height=400, x_axis_format="%d %b %Y", + x_is_date=True, focus_enable=True, + yaxis2_format="function(d) { return d3.format(',0.3f')(d) }") + + xdata = [1338501600000, 1345501600000, 1353501600000] + ydata = [6, 5, 1] + y2data = [0.002, 0.003, 0.004] + + extra_serie = {"tooltip": {"y_start": "There are ", "y_end": " calls"}, + "date_format": "%d %b %Y %H:%S" } + chart.add_serie(name="Serie 1", y=ydata, x=xdata, extra=extra_serie, + bar=True) + + extra_serie = {"tooltip": {"y_start": "There are ", "y_end": " min"}} + chart.add_serie(name="Serie 2", y=y2data, x=xdata, extra=extra_serie) + chart.buildcontent() + + Note that in case you have two data serie with extreme different numbers, + that you would like to format in different ways, + you can pass a keyword *yaxis1_format* or *yaxis2_format* when + creating the graph. + + In the example above the graph created presents the values of the second + data series with three digits right of the decimal point. + + Javascript generated: + + .. raw:: html + +
+ + + """ + CHART_FILENAME = "./lineplusbarchart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(linePlusBarChart, self).__init__(**kwargs) + self.model = 'linePlusBarChart' + + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + self.yaxis1_format = kwargs.get('yaxis1_format', + "function(d) { return d3.format(',f')(d) }") + self.yaxis2_format = kwargs.get('yaxis2_format', + "function(d) { return d3.format(',f')(d) }") + + if kwargs.get('x_is_date', False): + self.set_date_flag(True) + self.create_x_axis('xAxis', + format=kwargs.get('x_axis_format', + '%d %b %Y %H %S'), + date=True) + self.create_x_axis('x2Axis', format=kwargs.get('x_axis_format', + '%d %b %Y %H %S'), + date=True) + self.set_custom_tooltip_flag(True) + else: + self.create_x_axis('xAxis', format=kwargs.get('x_axis_format', + '.2f')) + self.create_x_axis('x2Axis', format=kwargs.get('x_axis_format', + '.2f')) + + self.create_y_axis('y1Axis', format=self.yaxis1_format, + custom_format=True) + self.create_y_axis('y2Axis', format=self.yaxis2_format, + custom_format=True) + + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/lineWithFocusChart.py b/airflow/_vendor/nvd3/lineWithFocusChart.py new file mode 100644 index 0000000000000..cd26cd4716652 --- /dev/null +++ b/airflow/_vendor/nvd3/lineWithFocusChart.py @@ -0,0 +1,105 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. 
+ +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class lineWithFocusChart(TemplateMixin, NVD3Chart): + """ + A lineWithFocusChart or line graph is a type of chart which displays information + as a series of data points connected by straight line segments. + The lineWithFocusChart provide a smaller chart that act as a selector, + this is very useful if you want to zoom on a specific time period. + + Python example:: + + from nvd3 import lineWithFocusChart + chart = lineWithFocusChart(name='lineWithFocusChart', x_is_date=True, x_axis_format="%d %b %Y") + xdata = [1365026400000000, 1365026500000000, 1365026600000000, 1365026700000000, 1365026800000000, 1365026900000000, 1365027000000000] + ydata = [-6, 5, -1, 2, 4, 8, 10] + + extra_serie = {"tooltip": {"y_start": "", "y_end": " ext"}, + "date_format": "%d %b %Y"} + chart.add_serie(name="Serie 1", y=ydata, x=xdata, extra=extra_serie) + chart.buildhtml() + + Javascript generated: + + .. raw:: html + +
+ + + """ + + CHART_FILENAME = "./linewfocuschart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(lineWithFocusChart, self).__init__(**kwargs) + self.model = 'lineWithFocusChart' + + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + + if kwargs.get('x_is_date', False): + self.set_date_flag(True) + self.create_x_axis('xAxis', format=kwargs.get('x_axis_format', + '%d %b %Y %H %S'), + date=True) + self.create_x_axis('x2Axis', format=kwargs.get('x_axis_format', + '%d %b %Y %H %S'), + date=True) + self.set_custom_tooltip_flag(True) + else: + self.create_x_axis('xAxis', format=kwargs.get('x_axis_format', + '.2f')) + self.create_x_axis('x2Axis', format=kwargs.get('x_axis_format', + '.2f')) + + self.create_y_axis('yAxis', format=kwargs.get('y_axis_format', '.2f')) + self.create_y_axis('y2Axis', format=kwargs.get('y_axis_format', '.2f')) + + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/multiBarChart.py b/airflow/_vendor/nvd3/multiBarChart.py new file mode 100644 index 0000000000000..cf335919a84c7 --- /dev/null +++ b/airflow/_vendor/nvd3/multiBarChart.py @@ -0,0 +1,95 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class multiBarChart(TemplateMixin, NVD3Chart): + """ + A multiple bar graph contains comparisons of two or more categories or bars. + One axis represents a quantity and the other axis identifies a specific feature + about the categories. Reading a multiple bar graph includes looking at extremes + (tallest/longest vs. shortest) in each grouping. 
+ + Python example:: + + from nvd3 import multiBarChart + chart = multiBarChart(width=500, height=400, x_axis_format=None) + xdata = ['one', 'two', 'three', 'four'] + ydata1 = [6, 12, 9, 16] + ydata2 = [8, 14, 7, 11] + + chart.add_serie(name="Serie 1", y=ydata1, x=xdata) + chart.add_serie(name="Serie 2", y=ydata2, x=xdata) + chart.buildhtml() + + Javascript generated: + + .. raw:: html + +
+ + + """ + + CHART_FILENAME = "./multibarchart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(multiBarChart, self).__init__(**kwargs) + + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + + if kwargs.get('x_is_date', False): + self.set_date_flag(True) + self.create_x_axis('xAxis', + format=kwargs.get('x_axis_format', '%d %b %Y'), + date=True) + self.set_custom_tooltip_flag(True) + else: + self.create_x_axis('xAxis', format=kwargs.get('x_axis_format', '.2f')) + self.create_y_axis('yAxis', format=kwargs.get('y_axis_format', '.2f')) + + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/multiBarHorizontalChart.py b/airflow/_vendor/nvd3/multiBarHorizontalChart.py new file mode 100644 index 0000000000000..ac969c31b548c --- /dev/null +++ b/airflow/_vendor/nvd3/multiBarHorizontalChart.py @@ -0,0 +1,100 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class multiBarHorizontalChart(TemplateMixin, NVD3Chart): + """ + A multiple horizontal bar graph contains comparisons of two or more categories or bars. 
+ + Python example:: + + from nvd3 import multiBarHorizontalChart + chart = multiBarHorizontalChart(name='multiBarHorizontalChart', height=400, width=400) + xdata = [-14, -7, 7, 14] + ydata = [-6, 5, -1, 9] + y2data = [-23, -6, -32, 9] + + extra_serie = {"tooltip": {"y_start": "", "y_end": " balls"}} + chart.add_serie(name="Serie 1", y=ydata, x=xdata, extra=extra_serie) + + extra_serie = {"tooltip": {"y_start": "", "y_end": " calls"}} + chart.add_serie(name="Serie 2", y=y2data, x=xdata, extra=extra_serie) + chart.buildcontent() + + Javascript generated: + + .. raw:: html + +
+ + + """ + + CHART_FILENAME = "./multibarcharthorizontal.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(multiBarHorizontalChart, self).__init__(**kwargs) + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + + self.create_x_axis('xAxis', format=kwargs.get('x_axis_format', '.2f')) + self.create_y_axis('yAxis', format=kwargs.get('y_axis_format', '.2f')) + + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/pieChart.py b/airflow/_vendor/nvd3/pieChart.py new file mode 100644 index 0000000000000..1db76bdb3424c --- /dev/null +++ b/airflow/_vendor/nvd3/pieChart.py @@ -0,0 +1,101 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class pieChart(TemplateMixin, NVD3Chart): + + """ + A pie chart (or a circle graph) is a circular chart divided into sectors, + illustrating numerical proportion. In chart, the arc length of each sector + is proportional to the quantity it represents. + + Python example:: + + from nvd3 import pieChart + chart = pieChart(name='pieChart', color_category='category20c', + height=400, width=400) + + xdata = ["Orange", "Banana", "Pear", "Kiwi", "Apple", "Strawbery", + "Pineapple"] + ydata = [3, 4, 0, 1, 5, 7, 3] + + extra_serie = {"tooltip": {"y_start": "", "y_end": " cal"}} + chart.add_serie(y=ydata, x=xdata, extra=extra_serie) + chart.buildhtml() + + Javascript generated: + + .. raw:: html + +
+ + + """ + CHART_FILENAME = "./piechart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(pieChart, self).__init__(**kwargs) + + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + self.donut = kwargs.get('donut', False) + self.donutRatio = kwargs.get('donutRatio', 0.35) + self.color_list = [] + self.create_x_axis('xAxis', format=None) + self.create_y_axis('yAxis', format=None) + # must have a specified height, otherwise it superimposes both chars + if height: + self.set_graph_height(height) + if width: + self.set_graph_width(width) + self.donut = kwargs.get('donut', False) + self.donutRatio = kwargs.get('donutRatio', 0.35) diff --git a/airflow/_vendor/nvd3/scatterChart.py b/airflow/_vendor/nvd3/scatterChart.py new file mode 100644 index 0000000000000..c3a87d2908bde --- /dev/null +++ b/airflow/_vendor/nvd3/scatterChart.py @@ -0,0 +1,121 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class scatterChart(TemplateMixin, NVD3Chart): + + """ + A scatter plot or scattergraph is a type of mathematical diagram using Cartesian + coordinates to display values for two variables for a set of data. + The data is displayed as a collection of points, each having the value of one variable + determining the position on the horizontal axis and the value of the other variable + determining the position on the vertical axis. 
+ + Python example:: + + from nvd3 import scatterChart + chart = scatterChart(name='scatterChart', height=400, width=400) + xdata = [3, 4, 0, -3, 5, 7] + ydata = [-1, 2, 3, 3, 15, 2] + ydata2 = [1, -2, 4, 7, -5, 3] + + kwargs1 = {'shape': 'circle', 'size': '1'} + kwargs2 = {'shape': 'cross', 'size': '10'} + + extra_serie = {"tooltip": {"y_start": "", "y_end": " call"}} + chart.add_serie(name="series 1", y=ydata, x=xdata, extra=extra_serie, **kwargs1) + + extra_serie = {"tooltip": {"y_start": "", "y_end": " min"}} + chart.add_serie(name="series 2", y=ydata2, x=xdata, extra=extra_serie, **kwargs2) + chart.buildhtml() + + Javascript generated: + + .. raw:: html + +
+ + + """ + + CHART_FILENAME = "./scatterchart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(scatterChart, self).__init__(**kwargs) + self.model = 'scatterChart' + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + self.create_x_axis('xAxis', format=kwargs.get('x_axis_format', '.02f'), + label=kwargs.get('x_axis_label', None)) + self.create_y_axis('yAxis', format=kwargs.get('y_axis_format', '.02f'), + label=kwargs.get('y_axis_label', None)) + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/stackedAreaChart.py b/airflow/_vendor/nvd3/stackedAreaChart.py new file mode 100644 index 0000000000000..8346cd2c53879 --- /dev/null +++ b/airflow/_vendor/nvd3/stackedAreaChart.py @@ -0,0 +1,99 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +Python-nvd3 is a Python wrapper for NVD3 graph library. +NVD3 is an attempt to build re-usable charts and chart components +for d3.js without taking away the power that d3.js gives you. + +Project location : https://github.com/areski/python-nvd3 +""" + +from .NVD3Chart import NVD3Chart, TemplateMixin + + +class stackedAreaChart(TemplateMixin, NVD3Chart): + """ + The stacked area chart is identical to the area chart, except the areas are stacked + on top of each other, rather than overlapping. This can make the chart much easier to read. + + Python example:: + + from nvd3 import stackedAreaChart + chart = stackedAreaChart(name='stackedAreaChart', height=400, width=400) + + xdata = [100, 101, 102, 103, 104, 105, 106,] + ydata = [6, 11, 12, 7, 11, 10, 11] + ydata2 = [8, 20, 16, 12, 20, 28, 28] + + extra_serie = {"tooltip": {"y_start": "There is ", "y_end": " min"}} + chart.add_serie(name="Serie 1", y=ydata, x=xdata, extra=extra_serie) + chart.add_serie(name="Serie 2", y=ydata2, x=xdata, extra=extra_serie) + chart.buildhtml() + + Javascript generated: + + .. raw:: html + +
+ + + """ + + CHART_FILENAME = "./stackedareachart.html" + template_chart_nvd3 = NVD3Chart.template_environment.get_template(CHART_FILENAME) + + def __init__(self, **kwargs): + super(stackedAreaChart, self).__init__(**kwargs) + height = kwargs.get('height', 450) + width = kwargs.get('width', None) + self.model = 'stackedAreaChart' + + if kwargs.get('x_is_date', False): + self.set_date_flag(True) + self.create_x_axis('xAxis', + format=kwargs.get('x_axis_format', '%d %b %Y'), + date=True) + self.set_custom_tooltip_flag(True) + else: + self.create_x_axis('xAxis', format=kwargs.get('x_axis_format', + '.2f')) + self.create_y_axis('yAxis', format=kwargs.get('y_axis_format', '.2f')) + + self.set_graph_height(height) + if width: + self.set_graph_width(width) diff --git a/airflow/_vendor/nvd3/templates/base.html b/airflow/_vendor/nvd3/templates/base.html new file mode 100644 index 0000000000000..e2d39dd7642cb --- /dev/null +++ b/airflow/_vendor/nvd3/templates/base.html @@ -0,0 +1,35 @@ +{% block container %} +{% endblock %} + +{% block start_script %} + {% if chart.tag_script_js %} + + {% endif %} +{% endblock endscript %} diff --git a/airflow/_vendor/nvd3/templates/content.html b/airflow/_vendor/nvd3/templates/content.html new file mode 100644 index 0000000000000..787f39b555a4a --- /dev/null +++ b/airflow/_vendor/nvd3/templates/content.html @@ -0,0 +1,123 @@ +{% extends "base.html" %} +{% block container %} +{% if chart.display_container %} + {{ chart.container }} +{% endif %} +{% endblock container %} + +{% block body %} + {% block data %} + data_{{ chart.name }}={{ chart.series_js }}; + {% endblock data %} + + {% block init %} + nv.addGraph(function() { + var chart = nv.models.{{ chart.model }}(){% if chart.use_interactive_guideline %}.useInteractiveGuideline(true){% endif %}; + + chart.margin({top: {{ chart.margin_top }}, right: {{ chart.margin_right }}, bottom: {{ chart.margin_bottom }}, left: {{ chart.margin_left }}}); + + var datum = data_{{ chart.name }}; + + {% if 
not chart.color_list and chart.color_category %} + chart.color(d3.scale.{{ chart.color_category }}().range()); + {% endif %} + {% endblock init %} + + {% if chart.stacked %} + chart.stacked(true); + {% endif %} + + {% block focus %} + {% endblock focus %} + + + {% block axes %} + {% for axis, a in chart.axislist.items() %} + {% if a.items() %} + chart.{{ axis }} + {% for attr, value in a.items() %} + .{{ attr}}({{ value}}){% if loop.last %}; + {% endif %} + {% endfor %} + {% endif %} + {% endfor %} + {% endblock axes %} + + {# generate custom tooltip for the chart #} + {% block tooltip %} + {% if chart.custom_tooltip_flag %} + {% if not chart.date_flag %} + {% if chart.model == 'pieChart' %} + {% block pietooltip %} + {% endblock pietooltip %} + {% else %} + chart.tooltipContent(function(key, y, e, graph) { + var x = String(graph.point.x); + var y = String(graph.point.y); + {{ chart.tooltip_condition_string }} + tooltip_str = '
'+key+'
' + y + ' at ' + x; + return tooltip_str; + }); + {% endif %} + {% else %} + chart.tooltipContent(function(key, y, e, graph) { + var x = d3.time.format("{{ chart.charttooltip_dateformat }}")(new Date(parseInt(graph.point.x))); + var y = String(graph.point.y); + {{ chart.tooltip_condition_string }} + tooltip_str = '
'+key+'
' + y + ' on ' + x; + return tooltip_str; + }); + {% endif %} + {% endif %} + {% endblock tooltip %} + + {# the shape attribute in kwargs is not applied when #} + {# not allowing other shapes to be rendered #} + {% block legend %} + chart.showLegend({{chart.show_legend|lower}}); + {% endblock legend %} + + {% block custoattr %} + {# add custom chart attributes #} + {% for attr, value in chart.chart_attr.items() %} + {% if value is string and value.startswith(".") %}: + chart.{{ attr }}{{ value }}; + {% else %} + chart.{{ attr }}({{ value }}); + {% endif %} + {% endfor %} + + {% if chart.resize %} + nv.utils.windowResize(chart.update); + {% endif %} + + {# include specific subchart #} + {{ chart.jschart }} + + {% endblock custoattr %} + + {% block inject %} + {# Inject data to D3 #} + d3.select('#{{ chart.name }} svg') + .datum(datum) + .transition().duration(500) + {% if chart.width %} + .attr('width', {{ chart.width}}) + {% endif %} + {% if chart.height %} + .attr('height', {{ chart.height}}) + {% endif %} + .call(chart); + {% endblock inject %} + + {# extra chart attributes #} + {% if chart.extras %} + {{ chart.extras }} + {% endif %} + + {# closing nv.addGraph #} + {% block close %} + }); + {% endblock close %} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/cumulativelinechart.html b/airflow/_vendor/nvd3/templates/cumulativelinechart.html new file mode 100644 index 0000000000000..546a3e8e55171 --- /dev/null +++ b/airflow/_vendor/nvd3/templates/cumulativelinechart.html @@ -0,0 +1,11 @@ +{# This template adds attributes unique + to cumulativeLineChart #} + +{% extends "content.html" %} +{% block body %} + +{# calling super guarantees everying in content is also found here ...#} +{{super()}} + +{% endblock body %} + diff --git a/airflow/_vendor/nvd3/templates/discretebarchart.html b/airflow/_vendor/nvd3/templates/discretebarchart.html new file mode 100644 index 0000000000000..2e31ae4874be0 --- /dev/null +++ 
b/airflow/_vendor/nvd3/templates/discretebarchart.html @@ -0,0 +1,31 @@ +{# This is a dummy template, we can use that template to add attributes unique + to discreteBarChart #} + +{% extends "content.html" %} +{% block body %} + + {% block data %} + {{super()}} + {% endblock data %} + + {% block init %} + {{super()}} + {% endblock init %} + + {% block axes %} + {{super()}} + {% endblock axes %} + + {% block custoattr %} + {{super()}} + {% endblock custoattr %} + + {% block inject %} + {{ super() }} + {% endblock inject %} + + {% block close %} + {{ super() }} + {% endblock close %} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/linebarwfocuschart.html b/airflow/_vendor/nvd3/templates/linebarwfocuschart.html new file mode 100644 index 0000000000000..ad4866c8153f9 --- /dev/null +++ b/airflow/_vendor/nvd3/templates/linebarwfocuschart.html @@ -0,0 +1,60 @@ +{# This template adds attributes unique + to lineChart #} + +{% extends "content.html" %} +{% block body %} + {% block data %} + data_{{ chart.name }}={{ chart.series_js }}; + {% endblock data %} + + + {% block init %} + {{super()}} + {% endblock init %} + {% block axes %} + {{super()}} + {% endblock axes %} + {% block tooltip %} + {{super()}} + {% endblock tooltip %} + + chart.showLegend({{chart.show_legend|lower}}); + + {# add custom chart attributes #} + {% for attr, value in chart.chart_attr.items() %} + {% if value is string and value.startswith(".") %}: + chart.{{ attr }}{{ value }}; + {% else %} + chart.{{ attr }}({{ value }}); + {% endif %} + {% endfor %} + + {% if chart.x_axis_format == 'AM_PM' %} + function get_am_pm(d){ + if (d > 12) { + d = d - 12; return (String(d) + 'PM'); + } + else { + return (String(d) + 'AM'); + } + }; + {% else %} + chart.x(function(d,i) { return i }); + {% endif %} + + {% if chart.resize %} + nv.utils.windowResize(chart.update); + {% endif %} + {% block inject %} + {{super()}} + {% endblock inject %} + + {% if chart.extras %} + {{ chart.extras }} + {% endif %} 
+ + {% block close %} + }); + {% endblock close %} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/linechart.html b/airflow/_vendor/nvd3/templates/linechart.html new file mode 100644 index 0000000000000..cf15d33041558 --- /dev/null +++ b/airflow/_vendor/nvd3/templates/linechart.html @@ -0,0 +1,47 @@ +{# This template adds attributes unique + to lineChart #} + +{% extends "content.html" %} +{% block body %} + + {% block data %} + {{super()}} + {% endblock data %} + + {% block init %} + {{super()}} + {% endblock init %} + + {% block axes %} + {{super()}} + {% endblock axes %} + + {% if chart.x_axis_format == 'AM_PM' %} + function get_am_pm(d){ + if (d > 12) { + d = d - 12; return (String(d) + 'PM'); + } + else { + return (String(d) + 'AM'); + } + }; + {% endif %} + + {% block legend %} + {{super()}} + {% endblock legend %} + + {% block custoattr %} + {{super()}} + {% endblock custoattr %} + + {% block inject %} + {{ super() }} + {% endblock inject %} + + {% block close %} + {{ super() }} + {% endblock close %} + +{% endblock body %} + diff --git a/airflow/_vendor/nvd3/templates/lineplusbarchart.html b/airflow/_vendor/nvd3/templates/lineplusbarchart.html new file mode 100644 index 0000000000000..73aeceacd2419 --- /dev/null +++ b/airflow/_vendor/nvd3/templates/lineplusbarchart.html @@ -0,0 +1,44 @@ +{# This template adds attributes unique + to linePlusBarChart #} + +{% extends "content.html" %} +{% block body %} + + {% block data %} + {{super()}} + {% endblock data %} + + {% block init %} + {{super()}} + {% endblock init %} + + {% block focus %} + {% if chart.focus_enable %} + chart.focusEnable(true); + {% else %} + chart.focusEnable(false); + {% endif %} + {% endblock focus %} + + {% block axes %} + {{super()}} + {% endblock axes %} + + {% block legend %} + {{super()}} + {% endblock legend %} + + {% block custoattr %} + {{super()}} + {% endblock custoattr %} + + {% block inject %} + {{ super() }} + {% endblock inject %} + + {% block close %} + {{ 
super() }} + {% endblock close %} + +{% endblock body %} + diff --git a/airflow/_vendor/nvd3/templates/linewfocuschart.html b/airflow/_vendor/nvd3/templates/linewfocuschart.html new file mode 100644 index 0000000000000..5abe983d7efc3 --- /dev/null +++ b/airflow/_vendor/nvd3/templates/linewfocuschart.html @@ -0,0 +1,10 @@ +{# This template adds attributes unique + to lineWithFocusChart #} + +{% extends "content.html" %} +{% block body %} + +{# calling super guarantees everying in content is also found here ...#} +{{super()}} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/multibarchart.html b/airflow/_vendor/nvd3/templates/multibarchart.html new file mode 100644 index 0000000000000..17eae7a634fef --- /dev/null +++ b/airflow/_vendor/nvd3/templates/multibarchart.html @@ -0,0 +1,10 @@ +{# This template adds attributes unique + to multiBarChart #} + +{% extends "content.html" %} +{% block body %} + +{# calling super guarantees everying in content is also found here ...#} +{{super()}} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/multibarcharthorizontal.html b/airflow/_vendor/nvd3/templates/multibarcharthorizontal.html new file mode 100644 index 0000000000000..17eae7a634fef --- /dev/null +++ b/airflow/_vendor/nvd3/templates/multibarcharthorizontal.html @@ -0,0 +1,10 @@ +{# This template adds attributes unique + to multiBarChart #} + +{% extends "content.html" %} +{% block body %} + +{# calling super guarantees everying in content is also found here ...#} +{{super()}} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/page.html b/airflow/_vendor/nvd3/templates/page.html new file mode 100644 index 0000000000000..2dd0f5d16f829 --- /dev/null +++ b/airflow/_vendor/nvd3/templates/page.html @@ -0,0 +1,12 @@ + + + + + {% for header_element in chart.header_css+chart.header_js %} + {{ header_element }} + {% endfor %} + + + {{ chart.content }} + + diff --git a/airflow/_vendor/nvd3/templates/piechart.html 
b/airflow/_vendor/nvd3/templates/piechart.html new file mode 100644 index 0000000000000..a200e6d4a21bb --- /dev/null +++ b/airflow/_vendor/nvd3/templates/piechart.html @@ -0,0 +1,80 @@ +{# This template adds attributes unique + to pieChart #} + +{% extends "content.html" %} +{% block body %} + + data_{{ chart.name }}={{ chart.series_js }}; + + nv.addGraph(function() { + var chart = nv.models.{{ chart.model }}(){% if chart.use_interactive_guideline %}.useInteractiveGuideline(true){% endif %}; + chart.margin({top: {{ chart.margin_top }}, right: {{ chart.margin_right }}, bottom: {{ chart.margin_bottom }}, left: {{ chart.margin_left }}}); + var datum = data_{{ chart.name }}[0].values; + + {% if not chart.color_list and chart.color_category %} + chart.color(d3.scale.{{ chart.color_category }}().range()); + {% endif %} + + chart.tooltipContent(function(key, y, e, graph) { + var x = String(key); + {{ chart.tooltip_condition_string }} + tooltip_str = '
'+x+'
' + y; + return tooltip_str; + }); + {# showLabels only supported in pieChart #} + chart.showLabels({{chart.show_labels|lower}}); + + {% if chart.donut %} + chart.donut(true); + chart.donutRatio({{ chart.donutRatio }}); + {% else %} + chart.donut(false); + {% endif %} + + chart.showLegend({{chart.show_legend|lower}}); + + {# add custom chart attributes #} + {% for attr, value in chart.chart_attr.items() %} + {% if value is string and value.startswith(".") %}: + chart.{{ attr }}{{ value }}; + {% else %} + chart.{{ attr }}({{ value }}); + {% endif %} + {% endfor %} + + {% if chart.resize %} + nv.utils.windowResize(chart.update); + {% endif %} + + {% if chart.color_list %} + var mycolor = new Array(); + {% for color in chart.color_list %} + mycolor[{{ loop.index - 1}}] = "{{ color }}"; + {% endfor %} + {% endif %} + + chart + .x(function(d) { return d.label }) + .y(function(d) { return d.value }); + + {% if chart.width %} + chart.width({{ chart.width }}); + {% endif %} + + {% if chart.height %} + chart.height({{ chart.height }}); + {% endif %} + + {% if chart.color_list %} + chart.color(mycolor); + {% endif %} + + {% block inject %} + {{super()}} + {% endblock inject %} + + {% block close %} + {{ super() }} + {% endblock close %} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/scatterchart.html b/airflow/_vendor/nvd3/templates/scatterchart.html new file mode 100644 index 0000000000000..8c2adaae34cee --- /dev/null +++ b/airflow/_vendor/nvd3/templates/scatterchart.html @@ -0,0 +1,52 @@ +{# This template adds attributes unique + to scatterChart #} + +{% extends "content.html" %} +{% block body %} + + {% block data %} + {{super()}} + {% endblock data %} + + {% block init %} + {{super()}} + {% endblock init %} + + {% block axes %} + {{super()}} + {% endblock axes %} + + {% if chart.x_axis_format == 'AM_PM' %} + function get_am_pm(d){ + if (d > 12) { + d = d - 12; return (String(d) + 'PM'); + } + else { + return (String(d) + 'AM'); + } + }; + {% endif %} 
+ + {% block legend %} + {{super()}} + {% endblock legend %} + + {% block custoattr %} + {{super()}} + {% endblock custoattr %} + + {% block inject %} + + chart + .showDistX(true) + .showDistY(true) + .color(d3.scale.category10().range()); + + {{ super() }} + {% endblock inject %} + + {% block close %} + {{ super() }} + {% endblock close %} + +{% endblock body %} diff --git a/airflow/_vendor/nvd3/templates/stackedareachart.html b/airflow/_vendor/nvd3/templates/stackedareachart.html new file mode 100644 index 0000000000000..b70833d2b385d --- /dev/null +++ b/airflow/_vendor/nvd3/templates/stackedareachart.html @@ -0,0 +1,7 @@ +{# This is a dummy template, we can use that template to add attributes unique + to stackedareachart #} + +{% extends "content.html" %} +{% block body %} + {{ super() }} +{% endblock body %} diff --git a/airflow/_vendor/nvd3/translator.py b/airflow/_vendor/nvd3/translator.py new file mode 100644 index 0000000000000..ffde2c2a1cec9 --- /dev/null +++ b/airflow/_vendor/nvd3/translator.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- + + +class Tag(object): + """Tag class""" + + def __init__(self, content=None): + self.content = content + self.attrs = ' '.join(['%s="%s"' % (attr, value) + for attr, value in self.attrs]) + + def __str__(self): + return '<%s%s>\n %s\n' % (self.name, + ' ' + self.attrs if self.attrs else '', + self.content, + self.name) + + +class ScriptTag(Tag): + name = 'script' + attrs = (('type', 'text/javascript'),) + + +class AnonymousFunction(object): + def __init__(self, arguments, content): + self.arguments = arguments + self.content = content + + def __str__(self): + return 'function(%s) { %s }' % (self.arguments, self.content) + + +class Function(object): + + def __init__(self, name): + self.name = name + self._calls = [] + + def __str__(self): + operations = [self.name] + operations.extend(str(call) for call in self._calls) + return '%s' % ('.'.join(operations),) + + def __getattr__(self, attr): + 
self._calls.append(attr) + return self + + def __call__(self, *args): + if not args: + self._calls[-1] = self._calls[-1] + '()' + else: + arguments = ','.join([str(arg) for arg in args]) + self._calls[-1] = self._calls[-1] + '(%s)' % (arguments,) + return self + + +class Assignment(object): + + def __init__(self, key, value, scoped=True): + self.key = key + self.value = value + self.scoped = scoped + + def __str__(self): + return '%s%s = %s;' % ('var ' if self.scoped else '', self.key, self.value) + + +def indent(func): + # TODO: Add indents to function str + return str(func) diff --git a/airflow/_vendor/slugify/LICENSE b/airflow/_vendor/slugify/LICENSE new file mode 100644 index 0000000000000..82af695f594e8 --- /dev/null +++ b/airflow/_vendor/slugify/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) Val Neekman @ Neekware Inc. http://neekware.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/airflow/_vendor/slugify/__init__.py b/airflow/_vendor/slugify/__init__.py new file mode 100644 index 0000000000000..7358b998cd543 --- /dev/null +++ b/airflow/_vendor/slugify/__init__.py @@ -0,0 +1,6 @@ +from .slugify import * + + +__author__ = 'Val Neekman @ Neekware Inc. [@vneekman]' +__description__ = 'A Python slugify application that also handles Unicode' +__version__ = '2.0.1' diff --git a/airflow/_vendor/slugify/slugify.py b/airflow/_vendor/slugify/slugify.py new file mode 100644 index 0000000000000..59e9672a16c5b --- /dev/null +++ b/airflow/_vendor/slugify/slugify.py @@ -0,0 +1,188 @@ +import re +import unicodedata +import types +import sys + +try: + from htmlentitydefs import name2codepoint + _unicode = unicode + _unicode_type = types.UnicodeType +except ImportError: + from html.entities import name2codepoint + _unicode = str + _unicode_type = str + unichr = chr + +try: + import unidecode +except ImportError: + import text_unidecode as unidecode + +__all__ = ['slugify', 'smart_truncate'] + + +CHAR_ENTITY_PATTERN = re.compile(r'&(%s);' % '|'.join(name2codepoint)) +DECIMAL_PATTERN = re.compile(r'&#(\d+);') +HEX_PATTERN = re.compile(r'&#x([\da-fA-F]+);') +QUOTE_PATTERN = re.compile(r'[\']+') +ALLOWED_CHARS_PATTERN = re.compile(r'[^-a-z0-9]+') +ALLOWED_CHARS_PATTERN_WITH_UPPERCASE = re.compile(r'[^-a-zA-Z0-9]+') +DUPLICATE_DASH_PATTERN = re.compile(r'-{2,}') +NUMBERS_PATTERN = re.compile(r'(?<=\d),(?=\d)') +DEFAULT_SEPARATOR = '-' + + +def smart_truncate(string, max_length=0, word_boundary=False, separator=' ', save_order=False): + """ + Truncate a string. 
+ :param string (str): string for modification + :param max_length (int): output string length + :param word_boundary (bool): + :param save_order (bool): if True then word order of output string is like input string + :param separator (str): separator between words + :return: + """ + + string = string.strip(separator) + + if not max_length: + return string + + if len(string) < max_length: + return string + + if not word_boundary: + return string[:max_length].strip(separator) + + if separator not in string: + return string[:max_length] + + truncated = '' + for word in string.split(separator): + if word: + next_len = len(truncated) + len(word) + if next_len < max_length: + truncated += '{0}{1}'.format(word, separator) + elif next_len == max_length: + truncated += '{0}'.format(word) + break + else: + if save_order: + break + if not truncated: # pragma: no cover + truncated = string[:max_length] + return truncated.strip(separator) + + +def slugify(text, entities=True, decimal=True, hexadecimal=True, max_length=0, word_boundary=False, + separator=DEFAULT_SEPARATOR, save_order=False, stopwords=(), regex_pattern=None, lowercase=True, + replacements=()): + """ + Make a slug from the given text. + :param text (str): initial text + :param entities (bool): + :param decimal (bool): + :param hexadecimal (bool): + :param max_length (int): output string length + :param word_boundary (bool): + :param save_order (bool): if parameter is True and max_length > 0 return whole words in the initial order + :param separator (str): separator between words + :param stopwords (iterable): words to discount + :param regex_pattern (str): regex pattern for allowed characters + :param lowercase (bool): activate case sensitivity by setting it to False + :param replacements (iterable): list of replacement rules e.g. 
[['|', 'or'], ['%', 'percent']] + :return (str): + """ + + # user-specific replacements + if replacements: + for old, new in replacements: + text = text.replace(old, new) + + # ensure text is unicode + if not isinstance(text, _unicode_type): + text = _unicode(text, 'utf-8', 'ignore') + + # replace quotes with dashes - pre-process + text = QUOTE_PATTERN.sub(DEFAULT_SEPARATOR, text) + + # decode unicode + text = unidecode.unidecode(text) + + # ensure text is still in unicode + if not isinstance(text, _unicode_type): + text = _unicode(text, 'utf-8', 'ignore') + + # character entity reference + if entities: + text = CHAR_ENTITY_PATTERN.sub(lambda m: unichr(name2codepoint[m.group(1)]), text) + + # decimal character reference + if decimal: + try: + text = DECIMAL_PATTERN.sub(lambda m: unichr(int(m.group(1))), text) + except Exception: + pass + + # hexadecimal character reference + if hexadecimal: + try: + text = HEX_PATTERN.sub(lambda m: unichr(int(m.group(1), 16)), text) + except Exception: + pass + + # translate + text = unicodedata.normalize('NFKD', text) + if sys.version_info < (3,): + text = text.encode('ascii', 'ignore') + + # make the text lowercase (optional) + if lowercase: + text = text.lower() + + # remove generated quotes -- post-process + text = QUOTE_PATTERN.sub('', text) + + # cleanup numbers + text = NUMBERS_PATTERN.sub('', text) + + # replace all other unwanted characters + if lowercase: + pattern = regex_pattern or ALLOWED_CHARS_PATTERN + else: + pattern = regex_pattern or ALLOWED_CHARS_PATTERN_WITH_UPPERCASE + text = re.sub(pattern, DEFAULT_SEPARATOR, text) + + # remove redundant + text = DUPLICATE_DASH_PATTERN.sub(DEFAULT_SEPARATOR, text).strip(DEFAULT_SEPARATOR) + + # remove stopwords + if stopwords: + if lowercase: + stopwords_lower = [s.lower() for s in stopwords] + words = [w for w in text.split(DEFAULT_SEPARATOR) if w not in stopwords_lower] + else: + words = [w for w in text.split(DEFAULT_SEPARATOR) if w not in stopwords] + text = 
DEFAULT_SEPARATOR.join(words) + + # finalize user-specific replacements + if replacements: + for old, new in replacements: + text = text.replace(old, new) + + # smart truncate if requested + if max_length > 0: + text = smart_truncate(text, max_length, word_boundary, DEFAULT_SEPARATOR, save_order) + + if separator != DEFAULT_SEPARATOR: + text = text.replace(DEFAULT_SEPARATOR, separator) + + return text + + +def main(): # pragma: no cover + if len(sys.argv) < 2: + print("Usage %s TEXT TO SLUGIFY" % sys.argv[0]) + else: + text = ' '.join(sys.argv[1:]) + print(slugify(text)) diff --git a/licenses/LICENSE-python-nvd3.txt b/licenses/LICENSE-python-nvd3.txt new file mode 100644 index 0000000000000..1add6249e57b4 --- /dev/null +++ b/licenses/LICENSE-python-nvd3.txt @@ -0,0 +1,24 @@ +The MIT License (MIT) + +Python-nvd3 + +Copyright (c) 2013 Arezqui Belaid and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/licenses/LICENSE-python-slugify.txt b/licenses/LICENSE-python-slugify.txt new file mode 100644 index 0000000000000..82af695f594e8 --- /dev/null +++ b/licenses/LICENSE-python-slugify.txt @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) Val Neekman @ Neekware Inc. http://neekware.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
From 59a7a79c7dc446b723185b346b6fc2f6ffef82dc Mon Sep 17 00:00:00 2001 From: Bolke de Bruin Date: Tue, 15 Jan 2019 20:40:49 +0100 Subject: [PATCH 0132/1104] [AIRFLOW-3697] Use vendorized slugify --- airflow/_vendor/nvd3/NVD3Chart.py | 5 +++-- airflow/_vendor/slugify/slugify.py | 5 +---- airflow/www/views.py | 2 +- setup.py | 3 +-- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/airflow/_vendor/nvd3/NVD3Chart.py b/airflow/_vendor/nvd3/NVD3Chart.py index 666993638de65..faefe5d3a0fcf 100644 --- a/airflow/_vendor/nvd3/NVD3Chart.py +++ b/airflow/_vendor/nvd3/NVD3Chart.py @@ -12,7 +12,8 @@ from __future__ import unicode_literals from optparse import OptionParser from jinja2 import Environment, PackageLoader -from slugify import slugify +from airflow._vendor.slugify import slugify + try: import simplejson as json except ImportError: @@ -22,7 +23,7 @@ PAGE_FILENAME = "./page.html" -pl = PackageLoader('nvd3', 'templates') +pl = PackageLoader('airflow._vendor.nvd3', 'templates') jinja2_env = Environment(lstrip_blocks=True, trim_blocks=True, loader=pl) template_content = jinja2_env.get_template(CONTENT_FILENAME) diff --git a/airflow/_vendor/slugify/slugify.py b/airflow/_vendor/slugify/slugify.py index 59e9672a16c5b..0e9886d827138 100644 --- a/airflow/_vendor/slugify/slugify.py +++ b/airflow/_vendor/slugify/slugify.py @@ -13,10 +13,7 @@ _unicode_type = str unichr = chr -try: - import unidecode -except ImportError: - import text_unidecode as unidecode +import text_unidecode as unidecode __all__ = ['slugify', 'smart_truncate'] diff --git a/airflow/www/views.py b/airflow/www/views.py index 9e250541ac49a..84e76ccc73bd0 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -31,7 +31,6 @@ import markdown -import nvd3 import pendulum import sqlalchemy as sqla from flask import ( @@ -67,6 +66,7 @@ from airflow.utils.helpers import alchemy_to_dict, render_log_filename from airflow.utils.json import json_ser from airflow.utils.state import State +from 
airflow._vendor import nvd3 from airflow.www import utils as wwwutils from airflow.www.app import app, appbuilder from airflow.www.decorators import action_logging, gzipped, has_dag_access diff --git a/setup.py b/setup.py index 03219e312917e..b2831e02108b1 100644 --- a/setup.py +++ b/setup.py @@ -314,13 +314,12 @@ def do_setup(): 'pygments>=2.0.1, <3.0', 'python-daemon>=2.1.1, <2.2', 'python-dateutil>=2.3, <3', - 'python-nvd3==0.15.0', 'requests>=2.20.0, <3', 'setproctitle>=1.1.8, <2', 'sqlalchemy>=1.1.15, <1.3.0', 'tabulate>=0.7.5, <=0.8.2', 'tenacity==4.12.0', - 'text-unidecode==1.2', # Avoid GPL dependency, pip uses reverse order(!) + 'text-unidecode==1.2', 'thrift>=0.9.2', 'tzlocal>=1.4', 'unicodecsv>=0.14.1', From 8762da31981aa4f8db06308a4f6cc520ee9414cd Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Sat, 23 Feb 2019 14:58:16 -0800 Subject: [PATCH 0133/1104] [AIRFLOW-3940] Migrate Hive Metastore plugin to FAB (#4758) Migrate this plugin from Flask-Admin to Flask-AppBuilder. I tested the plugin by: 1. Creating a dummy table named `foo` with a single column in a local Hive cluster 2. Configuring the connections `metastore_default` (Thrift) and `metastore_mysql` (MySQL) 3. Symlink `airflow/contrib/plugins/metastore_browser/` to `$AIRFLOW_HOME/plugins/` It'd be nice to automate this tests, but I decided not to invest the effort given that there's some discussion of removing this plugin entirely. 
` --- .../contrib/plugins/metastore_browser/main.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/airflow/contrib/plugins/metastore_browser/main.py b/airflow/contrib/plugins/metastore_browser/main.py index f0b7ddc25e2a3..f4f9182155244 100644 --- a/airflow/contrib/plugins/metastore_browser/main.py +++ b/airflow/contrib/plugins/metastore_browser/main.py @@ -21,14 +21,13 @@ import json from flask import Blueprint, request -from flask_admin import BaseView, expose +from flask_appbuilder import BaseView, expose import pandas as pd from airflow.hooks.hive_hooks import HiveMetastoreHook, HiveCliHook from airflow.hooks.mysql_hook import MySqlHook from airflow.hooks.presto_hook import PrestoHook from airflow.plugins_manager import AirflowPlugin -from airflow.www import utils as wwwutils from airflow.www.decorators import gzipped METASTORE_CONN_ID = 'metastore_default' @@ -45,7 +44,9 @@ # Creating a flask admin BaseView -class MetastoreBrowserView(BaseView, wwwutils.DataProfilingMixin): +class MetastoreBrowserView(BaseView): + + default_view = 'index' @expose('/') def index(self): @@ -60,14 +61,14 @@ def index(self): h = MySqlHook(METASTORE_MYSQL_CONN_ID) df = h.get_pandas_df(sql) df.db = ( - '' + df.db + '') table = df.to_html( classes="table table-striped table-bordered table-hover", index=False, escape=False, na_rep='',) - return self.render( + return self.render_template( "metastore_browser/dbs.html", table=table) @expose('/table/') @@ -75,7 +76,7 @@ def table(self): table_name = request.args.get("table") m = HiveMetastoreHook(METASTORE_CONN_ID) table = m.get_table(table_name) - return self.render( + return self.render_template( "metastore_browser/table.html", table=table, table_name=table_name, datetime=datetime, int=int) @@ -84,7 +85,7 @@ def db(self): db = request.args.get("db") m = HiveMetastoreHook(METASTORE_CONN_ID) tables = sorted(m.get_tables(db=db), key=lambda x: x.tableName) - return self.render( + return 
self.render_template( "metastore_browser/db.html", tables=tables, db=db) @gzipped @@ -163,8 +164,6 @@ def ddl(self): return h.run_cli(sql) -v = MetastoreBrowserView(category="Plugins", name="Hive Metadata Browser") - # Creating a flask blueprint to integrate the templates and static folder bp = Blueprint( "metastore_browser", __name__, @@ -177,4 +176,6 @@ def ddl(self): class MetastoreBrowserPlugin(AirflowPlugin): name = "metastore_browser" flask_blueprints = [bp] - admin_views = [v] + appbuilder_views = [{"name": "Hive Metadata Browser", + "category": "Plugins", + "view": MetastoreBrowserView()}] From f3dfc9495be41221bd08ea753555df0975deb961 Mon Sep 17 00:00:00 2001 From: BasPH Date: Sun, 24 Feb 2019 16:24:13 +0100 Subject: [PATCH 0134/1104] [AIRFLOW-3944] Remove code smells (#4762) - Removed mutable default arguments - Replace function calls by literals - Remove reusing same variable names - Make function name lowercase - Remove redundant parentheses - Remove unreachable code - Replaced unncessary list by set in dataproc_operator.py --- airflow/bin/cli.py | 10 ++- airflow/configuration.py | 2 +- airflow/contrib/hooks/openfaas_hook.py | 8 +-- airflow/contrib/hooks/qubole_hook.py | 2 +- .../contrib/operators/bigquery_operator.py | 4 +- .../contrib/operators/dataproc_operator.py | 72 +++++++++---------- airflow/executors/__init__.py | 2 +- airflow/jobs.py | 2 +- airflow/models/__init__.py | 8 +-- airflow/operators/python_operator.py | 2 - airflow/utils/dag_processing.py | 4 +- airflow/utils/helpers.py | 4 +- airflow/www/views.py | 4 +- 13 files changed, 60 insertions(+), 64 deletions(-) diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index 3be7576da868f..fae3cb7a27489 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -55,7 +55,7 @@ from airflow import jobs, settings from airflow import configuration as conf from airflow.exceptions import AirflowException, AirflowWebServerTimeout -from airflow.executors import GetDefaultExecutor +from airflow.executors 
import get_default_executor from airflow.models import DagModel, DagBag, TaskInstance, DagRun, Variable, DAG from airflow.models.connection import Connection from airflow.models.dagpickle import DagPickle @@ -458,7 +458,7 @@ def _run(args, dag, ti): print(e) raise e - executor = GetDefaultExecutor() + executor = get_default_executor() executor.start() print("Sending to executor.") executor.queue_task_instance( @@ -1045,10 +1045,8 @@ def serve_logs(filename): mimetype="application/json", as_attachment=False) - WORKER_LOG_SERVER_PORT = \ - int(conf.get('celery', 'WORKER_LOG_SERVER_PORT')) - flask_app.run( - host='0.0.0.0', port=WORKER_LOG_SERVER_PORT) + worker_log_server_port = int(conf.get('celery', 'WORKER_LOG_SERVER_PORT')) + flask_app.run(host='0.0.0.0', port=worker_log_server_port) @cli_utils.action_logging diff --git a/airflow/configuration.py b/airflow/configuration.py index a74d133fab875..0f4c434ec383c 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -383,7 +383,7 @@ def as_dict( opt = self._get_env_var_option(section, key) except ValueError: continue - if (not display_sensitive and ev != 'AIRFLOW__CORE__UNIT_TEST_MODE'): + if not display_sensitive and ev != 'AIRFLOW__CORE__UNIT_TEST_MODE': opt = '< hidden >' elif raw: opt = opt.replace('%', '%%') diff --git a/airflow/contrib/hooks/openfaas_hook.py b/airflow/contrib/hooks/openfaas_hook.py index 8f5062cbc708f..00e3667a77c65 100644 --- a/airflow/contrib/hooks/openfaas_hook.py +++ b/airflow/contrib/hooks/openfaas_hook.py @@ -60,7 +60,7 @@ def deploy_function(self, overwrite_function_if_exist, body): url = self.get_conn().host + self.DEPLOY_FUNCTION self.log.info("Deploying function " + url) response = requests.post(url, body) - if (response.status_code != OK_STATUS_CODE): + if response.status_code != OK_STATUS_CODE: self.log.error("Response status " + str(response.status_code)) self.log.error("Failed to deploy") raise AirflowException('failed to deploy') @@ -71,7 +71,7 @@ def 
invoke_async_function(self, body): url = self.get_conn().host + self.INVOKE_ASYNC_FUNCTION + self.function_name self.log.info("Invoking function " + url) response = requests.post(url, body) - if (response.ok): + if response.ok: self.log.info("Invoked " + self.function_name) else: self.log.error("Response status " + str(response.status_code)) @@ -81,7 +81,7 @@ def update_function(self, body): url = self.get_conn().host + self.UPDATE_FUNCTION self.log.info("Updating function " + url) response = requests.put(url, body) - if (response.status_code != OK_STATUS_CODE): + if response.status_code != OK_STATUS_CODE: self.log.error("Response status " + str(response.status_code)) self.log.error("Failed to update response " + response.content.decode("utf-8")) raise AirflowException('failed to update ' + self.function_name) @@ -92,7 +92,7 @@ def does_function_exist(self): url = self.get_conn().host + self.GET_FUNCTION + self.function_name response = requests.get(url) - if (response.ok): + if response.ok: return True else: self.log.error("Failed to find function " + self.function_name) diff --git a/airflow/contrib/hooks/qubole_hook.py b/airflow/contrib/hooks/qubole_hook.py index df11a50d5d8d3..69247cb20be31 100755 --- a/airflow/contrib/hooks/qubole_hook.py +++ b/airflow/contrib/hooks/qubole_hook.py @@ -204,7 +204,7 @@ def create_cmd_args(self, context): args = [] cmd_type = self.kwargs['command_type'] inplace_args = None - tags = set([self.dag_id, self.task_id, context['run_id']]) + tags = {self.dag_id, self.task_id, context['run_id']} positional_args_list = flatten_list(POSITIONAL_ARGS.values()) for k, v in self.kwargs.items(): diff --git a/airflow/contrib/operators/bigquery_operator.py b/airflow/contrib/operators/bigquery_operator.py index 10e3f840a8e31..f3a43baa24b54 100644 --- a/airflow/contrib/operators/bigquery_operator.py +++ b/airflow/contrib/operators/bigquery_operator.py @@ -435,7 +435,7 @@ def __init__(self, bigquery_conn_id='bigquery_default', 
google_cloud_storage_conn_id='google_cloud_default', delegate_to=None, - src_fmt_configs={}, + src_fmt_configs=None, labels=None, *args, **kwargs): @@ -462,7 +462,7 @@ def __init__(self, self.google_cloud_storage_conn_id = google_cloud_storage_conn_id self.delegate_to = delegate_to - self.src_fmt_configs = src_fmt_configs + self.src_fmt_configs = src_fmt_configs if src_fmt_configs is not None else dict() self.labels = labels def execute(self, context): diff --git a/airflow/contrib/operators/dataproc_operator.py b/airflow/contrib/operators/dataproc_operator.py index e64cd25ef534f..749f115318dc9 100644 --- a/airflow/contrib/operators/dataproc_operator.py +++ b/airflow/contrib/operators/dataproc_operator.py @@ -725,12 +725,12 @@ class DataProcPigOperator(BaseOperator): :param region: The specified region where the dataproc cluster is created. :type region: str :param job_error_states: Job states that should be considered error states. - Any states in this list will result in an error being raised and failure of the + Any states in this set will result in an error being raised and failure of the task. Eg, if the ``CANCELLED`` state should also be considered a task failure, - pass in ``['ERROR', 'CANCELLED']``. Possible values are currently only + pass in ``{'ERROR', 'CANCELLED'}``. Possible values are currently only ``'ERROR'`` and ``'CANCELLED'``, but could change in the future. Defaults to - ``['ERROR']``. - :type job_error_states: list + ``{'ERROR'}``. + :type job_error_states: set :var dataproc_job_id: The actual "jobId" as submitted to the Dataproc API. 
This is useful for identifying or linking to the job in the Google Cloud Console Dataproc UI, as the actual "jobId" submitted to the Dataproc API is appended with @@ -754,7 +754,7 @@ def __init__( gcp_conn_id='google_cloud_default', delegate_to=None, region='global', - job_error_states=['ERROR'], + job_error_states=None, *args, **kwargs): @@ -769,7 +769,7 @@ def __init__( self.dataproc_properties = dataproc_pig_properties self.dataproc_jars = dataproc_pig_jars self.region = region - self.job_error_states = job_error_states + self.job_error_states = job_error_states if job_error_states is not None else {'ERROR'} def execute(self, context): hook = DataProcHook(gcp_conn_id=self.gcp_conn_id, @@ -822,12 +822,12 @@ class DataProcHiveOperator(BaseOperator): :param region: The specified region where the dataproc cluster is created. :type region: str :param job_error_states: Job states that should be considered error states. - Any states in this list will result in an error being raised and failure of the + Any states in this set will result in an error being raised and failure of the task. Eg, if the ``CANCELLED`` state should also be considered a task failure, - pass in ``['ERROR', 'CANCELLED']``. Possible values are currently only + pass in ``{'ERROR', 'CANCELLED'}``. Possible values are currently only ``'ERROR'`` and ``'CANCELLED'``, but could change in the future. Defaults to - ``['ERROR']``. - :type job_error_states: list + ``{'ERROR'}``. + :type job_error_states: set :var dataproc_job_id: The actual "jobId" as submitted to the Dataproc API. 
This is useful for identifying or linking to the job in the Google Cloud Console Dataproc UI, as the actual "jobId" submitted to the Dataproc API is appended with @@ -851,7 +851,7 @@ def __init__( gcp_conn_id='google_cloud_default', delegate_to=None, region='global', - job_error_states=['ERROR'], + job_error_states=None, *args, **kwargs): @@ -866,7 +866,7 @@ def __init__( self.dataproc_properties = dataproc_hive_properties self.dataproc_jars = dataproc_hive_jars self.region = region - self.job_error_states = job_error_states + self.job_error_states = job_error_states if job_error_states is not None else {'ERROR'} def execute(self, context): hook = DataProcHook(gcp_conn_id=self.gcp_conn_id, @@ -921,12 +921,12 @@ class DataProcSparkSqlOperator(BaseOperator): :param region: The specified region where the dataproc cluster is created. :type region: str :param job_error_states: Job states that should be considered error states. - Any states in this list will result in an error being raised and failure of the + Any states in this set will result in an error being raised and failure of the task. Eg, if the ``CANCELLED`` state should also be considered a task failure, - pass in ``['ERROR', 'CANCELLED']``. Possible values are currently only + pass in ``{'ERROR', 'CANCELLED'}``. Possible values are currently only ``'ERROR'`` and ``'CANCELLED'``, but could change in the future. Defaults to - ``['ERROR']``. - :type job_error_states: list + ``{'ERROR'}``. + :type job_error_states: set :var dataproc_job_id: The actual "jobId" as submitted to the Dataproc API. 
This is useful for identifying or linking to the job in the Google Cloud Console Dataproc UI, as the actual "jobId" submitted to the Dataproc API is appended with @@ -950,7 +950,7 @@ def __init__( gcp_conn_id='google_cloud_default', delegate_to=None, region='global', - job_error_states=['ERROR'], + job_error_states=None, *args, **kwargs): @@ -965,7 +965,7 @@ def __init__( self.dataproc_properties = dataproc_spark_properties self.dataproc_jars = dataproc_spark_jars self.region = region - self.job_error_states = job_error_states + self.job_error_states = job_error_states if job_error_states is not None else {'ERROR'} def execute(self, context): hook = DataProcHook(gcp_conn_id=self.gcp_conn_id, @@ -1027,12 +1027,12 @@ class DataProcSparkOperator(BaseOperator): :param region: The specified region where the dataproc cluster is created. :type region: str :param job_error_states: Job states that should be considered error states. - Any states in this list will result in an error being raised and failure of the + Any states in this set will result in an error being raised and failure of the task. Eg, if the ``CANCELLED`` state should also be considered a task failure, - pass in ``['ERROR', 'CANCELLED']``. Possible values are currently only + pass in ``{'ERROR', 'CANCELLED'}``. Possible values are currently only ``'ERROR'`` and ``'CANCELLED'``, but could change in the future. Defaults to - ``['ERROR']``. - :type job_error_states: list + ``{'ERROR'}``. + :type job_error_states: set :var dataproc_job_id: The actual "jobId" as submitted to the Dataproc API. 
This is useful for identifying or linking to the job in the Google Cloud Console Dataproc UI, as the actual "jobId" submitted to the Dataproc API is appended with @@ -1058,7 +1058,7 @@ def __init__( gcp_conn_id='google_cloud_default', delegate_to=None, region='global', - job_error_states=['ERROR'], + job_error_states=None, *args, **kwargs): @@ -1075,7 +1075,7 @@ def __init__( self.dataproc_properties = dataproc_spark_properties self.dataproc_jars = dataproc_spark_jars self.region = region - self.job_error_states = job_error_states + self.job_error_states = job_error_states if job_error_states is not None else {'ERROR'} def execute(self, context): hook = DataProcHook(gcp_conn_id=self.gcp_conn_id, @@ -1135,12 +1135,12 @@ class DataProcHadoopOperator(BaseOperator): :param region: The specified region where the dataproc cluster is created. :type region: str :param job_error_states: Job states that should be considered error states. - Any states in this list will result in an error being raised and failure of the + Any states in this set will result in an error being raised and failure of the task. Eg, if the ``CANCELLED`` state should also be considered a task failure, - pass in ``['ERROR', 'CANCELLED']``. Possible values are currently only + pass in ``{'ERROR', 'CANCELLED'}``. Possible values are currently only ``'ERROR'`` and ``'CANCELLED'``, but could change in the future. Defaults to - ``['ERROR']``. - :type job_error_states: list + ``{'ERROR'}``. + :type job_error_states: set :var dataproc_job_id: The actual "jobId" as submitted to the Dataproc API. 
This is useful for identifying or linking to the job in the Google Cloud Console Dataproc UI, as the actual "jobId" submitted to the Dataproc API is appended with @@ -1166,7 +1166,7 @@ def __init__( gcp_conn_id='google_cloud_default', delegate_to=None, region='global', - job_error_states=['ERROR'], + job_error_states=None, *args, **kwargs): @@ -1183,7 +1183,7 @@ def __init__( self.dataproc_properties = dataproc_hadoop_properties self.dataproc_jars = dataproc_hadoop_jars self.region = region - self.job_error_states = job_error_states + self.job_error_states = job_error_states if job_error_states is not None else {'ERROR'} def execute(self, context): hook = DataProcHook(gcp_conn_id=self.gcp_conn_id, @@ -1243,12 +1243,12 @@ class DataProcPySparkOperator(BaseOperator): :param region: The specified region where the dataproc cluster is created. :type region: str :param job_error_states: Job states that should be considered error states. - Any states in this list will result in an error being raised and failure of the + Any states in this set will result in an error being raised and failure of the task. Eg, if the ``CANCELLED`` state should also be considered a task failure, - pass in ``['ERROR', 'CANCELLED']``. Possible values are currently only + pass in ``{'ERROR', 'CANCELLED'}``. Possible values are currently only ``'ERROR'`` and ``'CANCELLED'``, but could change in the future. Defaults to - ``['ERROR']``. - :type job_error_states: list + ``{'ERROR'}``. + :type job_error_states: set :var dataproc_job_id: The actual "jobId" as submitted to the Dataproc API. 
This is useful for identifying or linking to the job in the Google Cloud Console Dataproc UI, as the actual "jobId" submitted to the Dataproc API is appended with @@ -1301,7 +1301,7 @@ def __init__( gcp_conn_id='google_cloud_default', delegate_to=None, region='global', - job_error_states=['ERROR'], + job_error_states=None, *args, **kwargs): @@ -1318,7 +1318,7 @@ def __init__( self.dataproc_properties = dataproc_pyspark_properties self.dataproc_jars = dataproc_pyspark_jars self.region = region - self.job_error_states = job_error_states + self.job_error_states = job_error_states if job_error_states is not None else {'ERROR'} def execute(self, context): hook = DataProcHook( diff --git a/airflow/executors/__init__.py b/airflow/executors/__init__.py index c33bbb634d17b..b9d8a020a70ae 100644 --- a/airflow/executors/__init__.py +++ b/airflow/executors/__init__.py @@ -36,7 +36,7 @@ def _integrate_plugins(): globals()[executors_module._name] = executors_module -def GetDefaultExecutor(): +def get_default_executor(): """Creates a new instance of the configured executor if none exists and returns it""" global DEFAULT_EXECUTOR diff --git a/airflow/jobs.py b/airflow/jobs.py index 0253e0eb341df..20222204a2e55 100644 --- a/airflow/jobs.py +++ b/airflow/jobs.py @@ -100,7 +100,7 @@ class BaseJob(Base, LoggingMixin): def __init__( self, - executor=executors.GetDefaultExecutor(), + executor=executors.get_default_executor(), heartrate=conf.getfloat('scheduler', 'JOB_HEARTBEAT_SEC'), *args, **kwargs): self.hostname = get_hostname() diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 1161066288a21..da02d77b0a8e1 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -74,7 +74,7 @@ import six from airflow import settings, utils -from airflow.executors import GetDefaultExecutor, LocalExecutor +from airflow.executors import get_default_executor, LocalExecutor from airflow import configuration from airflow.exceptions import ( 
AirflowDagCycleException, AirflowException, AirflowSkipException, AirflowTaskTimeout, @@ -293,7 +293,7 @@ def __init__( # do not use default arg in signature, to fix import cycle on plugin load if executor is None: - executor = GetDefaultExecutor() + executor = get_default_executor() dag_folder = dag_folder or settings.DAGS_FOLDER self.log.info("Filling up the DagBag from %s", dag_folder) self.dag_folder = dag_folder @@ -3813,7 +3813,7 @@ def get_task(self, task_id): @provide_session def pickle_info(self, session=None): - d = {} + d = dict() d['is_picklable'] = True try: dttm = timezone.utcnow() @@ -3961,7 +3961,7 @@ def run( if not executor and local: executor = LocalExecutor() elif not executor: - executor = GetDefaultExecutor() + executor = get_default_executor() job = BackfillJob( self, start_date=start_date, diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py index dc6639ca08ab4..daa1ce27bb05c 100644 --- a/airflow/operators/python_operator.py +++ b/airflow/operators/python_operator.py @@ -420,5 +420,3 @@ def _generate_python_code(self): python_callable_lines=dedent(inspect.getsource(fn)), python_callable_name=fn.__name__, pickling_library=pickling_library) - - self.log.info("Done.") diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index fa794fefc2c69..5026c3fd7fb4d 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -336,8 +336,8 @@ def list_py_file_paths(directory, safe_mode=True, # Airflow DAG definition. 
might_contain_dag = True if safe_mode and not zipfile.is_zipfile(file_path): - with open(file_path, 'rb') as f: - content = f.read() + with open(file_path, 'rb') as fp: + content = fp.read() might_contain_dag = all( [s in content for s in (b'DAG', b'airflow')]) diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py index a311a62418622..404321ebc0ee8 100644 --- a/airflow/utils/helpers.py +++ b/airflow/utils/helpers.py @@ -76,8 +76,8 @@ def alchemy_to_dict(obj): def ask_yesno(question): - yes = set(['yes', 'y']) - no = set(['no', 'n']) + yes = {'yes', 'y'} + no = {'no', 'n'} done = False print(question) diff --git a/airflow/www/views.py b/airflow/www/views.py index 84e76ccc73bd0..70ca997a2e320 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -763,8 +763,8 @@ def run(self): ignore_task_deps = request.args.get('ignore_task_deps') == "true" ignore_ti_state = request.args.get('ignore_ti_state') == "true" - from airflow.executors import GetDefaultExecutor - executor = GetDefaultExecutor() + from airflow.executors import get_default_executor + executor = get_default_executor() valid_celery_config = False valid_kubernetes_config = False From 31cd02f8030f851c6735a335c01e9c150c3e8d0f Mon Sep 17 00:00:00 2001 From: Andrew Stahlman Date: Sun, 24 Feb 2019 13:52:49 -0800 Subject: [PATCH 0135/1104] [AIRFLOW-3926] Remove references to Flask-Admin (#4759) Remove all remaining references to Flask-Admin and remove it as a dependency. This should be the final step in the deprecation of Flask-Admin in favor of Flask-AppBuilder. 
--- airflow/__init__.py | 5 ---- .../contrib/plugins/metastore_browser/main.py | 2 +- airflow/www/views.py | 12 --------- docs/plugins.rst | 27 +++---------------- setup.py | 1 - tests/plugins/test_plugin.py | 24 +---------------- 6 files changed, 5 insertions(+), 66 deletions(-) diff --git a/airflow/__init__.py b/airflow/__init__.py index d010fe4c74840..723c8c28d6d55 100644 --- a/airflow/__init__.py +++ b/airflow/__init__.py @@ -35,7 +35,6 @@ # flake8: noqa: F401 from airflow import settings, configuration as conf from airflow.models import DAG -from flask_admin import BaseView from importlib import import_module from airflow.exceptions import AirflowException @@ -72,10 +71,6 @@ def load_login(): raise AirflowException("Failed to import authentication backend") -class AirflowViewPlugin(BaseView): - pass - - class AirflowMacroPlugin(object): def __init__(self, namespace): self.namespace = namespace diff --git a/airflow/contrib/plugins/metastore_browser/main.py b/airflow/contrib/plugins/metastore_browser/main.py index f4f9182155244..7218636850cc0 100644 --- a/airflow/contrib/plugins/metastore_browser/main.py +++ b/airflow/contrib/plugins/metastore_browser/main.py @@ -43,7 +43,7 @@ pd.set_option('display.max_colwidth', -1) -# Creating a flask admin BaseView +# Creating a Flask-AppBuilder BaseView class MetastoreBrowserView(BaseView): default_view = 'index' diff --git a/airflow/www/views.py b/airflow/www/views.py index 70ca997a2e320..eb047eb34b224 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -2411,18 +2411,6 @@ def action_set_retry(self, tis): self.update_redirect() return redirect(self.get_redirect()) - def get_one(self, id): - """ - As a workaround for AIRFLOW-252, this method overrides Flask-Admin's - ModelView.get_one(). - - TODO: this method should be removed once the below bug is fixed on - Flask-Admin side. 
https://github.com/flask-admin/flask-admin/issues/1226 - """ - task_id, dag_id, execution_date = iterdecode(id) # noqa - execution_date = pendulum.parse(execution_date) - return self.session.query(self.model).get((task_id, dag_id, execution_date)) - class DagModelView(AirflowModelView): route_base = '/dagmodel' diff --git a/docs/plugins.rst b/docs/plugins.rst index 010977956b4b1..9e126789c187d 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -86,13 +86,8 @@ looks like: executors = [] # A list of references to inject into the macros namespace macros = [] - # A list of objects created from a class derived - # from flask_admin.BaseView - admin_views = [] - # A list of Blueprint object created from flask.Blueprint. For use with the flask_admin based GUI + # A list of Blueprint object created from flask.Blueprint. For use with the flask_appbuilder based GUI flask_blueprints = [] - # A list of menu links (flask_admin.base.MenuLink). For use with the flask_admin based GUI - menu_links = [] # A list of dictionaries containing FlaskAppBuilder BaseView object and some metadata. See example below appbuilder_views = [] # A list of dictionaries containing FlaskAppBuilder BaseView object and some metadata. See example below @@ -142,9 +137,7 @@ definitions in Airflow. from airflow.plugins_manager import AirflowPlugin from flask import Blueprint - from flask_admin import BaseView, expose - from flask_admin.base import MenuLink - from flask_appbuilder import BaseView as AppBuilderBaseView + from flask_appbuilder import expose, BaseView as AppBuilderBaseView # Importing base classes that we need to derive from airflow.hooks.base_hook import BaseHook @@ -172,14 +165,6 @@ definitions in Airflow. 
def plugin_macro(): pass - # Creating a flask admin BaseView - class TestView(BaseView): - @expose('/') - def test(self): - # in this example, put your test_plugin/test.html template at airflow/plugins/templates/test_plugin/test.html - return self.render("test_plugin/test.html", content="Hello galaxy!") - v = TestView(category="Test Plugin", name="Test View") - # Creating a flask blueprint to integrate the templates and static folder bp = Blueprint( "test_plugin", __name__, @@ -187,11 +172,6 @@ definitions in Airflow. static_folder='static', static_url_path='/static/test_plugin') - ml = MenuLink( - category='Test Plugin', - name='Test Menu Link', - url='https://airflow.apache.org/') - # Creating a flask appbuilder BaseView class TestAppBuilderBaseView(AppBuilderBaseView): default_view = "test" @@ -199,6 +179,7 @@ definitions in Airflow. @expose("/") def test(self): return self.render("test_plugin/test.html", content="Hello galaxy!") + v_appbuilder_view = TestAppBuilderBaseView() v_appbuilder_package = {"name": "Test View", "category": "Test Plugin", @@ -218,9 +199,7 @@ definitions in Airflow. 
hooks = [PluginHook] executors = [PluginExecutor] macros = [plugin_macro] - admin_views = [v] flask_blueprints = [bp] - menu_links = [ml] appbuilder_views = [v_appbuilder_package] appbuilder_menu_items = [appbuilder_mitem] diff --git a/setup.py b/setup.py index b2831e02108b1..4fa454625fb68 100644 --- a/setup.py +++ b/setup.py @@ -295,7 +295,6 @@ def do_setup(): 'enum34~=1.1.6;python_version<"3.4"', 'flask>=1.0, <2.0', 'flask-appbuilder==1.12.3', - 'flask-admin==1.5.3', 'flask-caching>=1.3.3, <1.4.0', 'flask-login>=0.3, <0.5', 'flask-swagger==0.2.13', diff --git a/tests/plugins/test_plugin.py b/tests/plugins/test_plugin.py index 8bf0416b220ae..00f2c35db16ac 100644 --- a/tests/plugins/test_plugin.py +++ b/tests/plugins/test_plugin.py @@ -21,9 +21,7 @@ from airflow.plugins_manager import AirflowPlugin from flask import Blueprint -from flask_admin import BaseView, expose -from flask_admin.base import MenuLink -from flask_appbuilder import BaseView as AppBuilderBaseView +from flask_appbuilder import expose, BaseView as AppBuilderBaseView # Importing base classes that we need to derive from airflow.hooks.base_hook import BaseHook @@ -57,18 +55,6 @@ def plugin_macro(): pass -# Creating a flask admin BaseView -class PluginTestView(BaseView): - @expose('/') - def test(self): - # in this example, put your test_plugin/test.html - # template at airflow/plugins/templates/test_plugin/test.html - return self.render("test_plugin/test.html", content="Hello galaxy!") - - -v = PluginTestView(category="Test Plugin", name="Test View") - - # Creating a flask appbuilder BaseView class PluginTestAppBuilderBaseView(AppBuilderBaseView): default_view = "test" @@ -98,12 +84,6 @@ def test(self): static_url_path='/static/test_plugin') -ml = MenuLink( - category='Test Plugin', - name="Test Menu Link", - url="https://airflow.apache.org/") - - # Defining the plugin class class AirflowTestPlugin(AirflowPlugin): name = "test_plugin" @@ -112,9 +92,7 @@ class AirflowTestPlugin(AirflowPlugin): hooks = 
[PluginHook] executors = [PluginExecutor] macros = [plugin_macro] - admin_views = [v] flask_blueprints = [bp] - menu_links = [ml] appbuilder_views = [v_appbuilder_package] appbuilder_menu_items = [appbuilder_mitem] From c50a85146373bafb0cbf86850f834d63bd4dede8 Mon Sep 17 00:00:00 2001 From: Joshua Carp Date: Mon, 25 Feb 2019 00:35:14 -0500 Subject: [PATCH 0136/1104] [AIRFLOW-3932] Update unit tests and documentation for safe mode flag. (#4760) --- airflow/models/__init__.py | 15 ++++++++++----- docs/concepts.rst | 6 ++++-- tests/models.py | 38 +++++++++++++++++++++++++++++++++++++- 3 files changed, 51 insertions(+), 8 deletions(-) diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index da02d77b0a8e1..b3b6186c0ddf1 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -289,7 +289,8 @@ def __init__( self, dag_folder=None, executor=None, - include_examples=configuration.conf.getboolean('core', 'LOAD_EXAMPLES')): + include_examples=configuration.conf.getboolean('core', 'LOAD_EXAMPLES'), + safe_mode=configuration.conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE')): # do not use default arg in signature, to fix import cycle on plugin load if executor is None: @@ -304,7 +305,10 @@ def __init__( self.import_errors = {} self.has_logged = False - self.collect_dags(dag_folder=dag_folder, include_examples=include_examples) + self.collect_dags( + dag_folder=dag_folder, + include_examples=include_examples, + safe_mode=safe_mode) def size(self): """ @@ -539,7 +543,8 @@ def collect_dags( self, dag_folder=None, only_if_updated=True, - include_examples=configuration.conf.getboolean('core', 'LOAD_EXAMPLES')): + include_examples=configuration.conf.getboolean('core', 'LOAD_EXAMPLES'), + safe_mode=configuration.conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE')): """ Given a file path or a folder, this method looks for python modules, imports them and adds them to the dagbag collection. 
@@ -560,13 +565,13 @@ def collect_dags( FileLoadStat = namedtuple( 'FileLoadStat', "file duration dag_num task_num dags") - safe_mode = configuration.conf.getboolean('core', 'dag_discovery_safe_mode') for filepath in list_py_file_paths(dag_folder, safe_mode=safe_mode, include_examples=include_examples): try: ts = timezone.utcnow() found_dags = self.process_file( - filepath, only_if_updated=only_if_updated) + filepath, only_if_updated=only_if_updated, + safe_mode=safe_mode) td = timezone.utcnow() - ts td = td.total_seconds() + ( diff --git a/docs/concepts.rst b/docs/concepts.rst index 9ca1978217739..848543c3598c7 100644 --- a/docs/concepts.rst +++ b/docs/concepts.rst @@ -52,8 +52,10 @@ the ``DAG`` objects. You can have as many DAGs as you want, each describing an arbitrary number of tasks. In general, each one should correspond to a single logical workflow. -.. note:: When searching for DAGs, Airflow will only consider files where the string - "airflow" and "DAG" both appear in the contents of the ``.py`` file. +.. note:: When searching for DAGs, Airflow only considers python files + that contain the strings "airflow" and "DAG" by default. To consider + all python files instead, disable the ``DAG_DISCOVERY_SAFE_MODE`` + configuration flag. Scope ----- diff --git a/tests/models.py b/tests/models.py index 69980048730c9..7fae48f299a7a 100644 --- a/tests/models.py +++ b/tests/models.py @@ -32,6 +32,7 @@ import unittest import urllib import uuid +import shutil from tempfile import NamedTemporaryFile, mkdtemp import pendulum @@ -1443,7 +1444,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - os.rmdir(cls.empty_dir) + shutil.rmtree(cls.empty_dir) def test_get_existing_dag(self): """ @@ -1479,6 +1480,41 @@ def test_dont_load_example(self): self.assertEqual(dagbag.size(), 0) + def test_safe_mode_heuristic_match(self): + """With safe mode enabled, a file matching the discovery heuristics + should be discovered. 
+ """ + with NamedTemporaryFile(dir=self.empty_dir, suffix=".py") as fp: + fp.write("# airflow".encode()) + fp.write("# DAG".encode()) + fp.flush() + dagbag = models.DagBag( + dag_folder=self.empty_dir, include_examples=False, safe_mode=True) + self.assertEqual(len(dagbag.dagbag_stats), 1) + self.assertEqual( + dagbag.dagbag_stats[0].file, + "/{}".format(os.path.basename(fp.name))) + + def test_safe_mode_heuristic_mismatch(self): + """With safe mode enabled, a file not matching the discovery heuristics + should not be discovered. + """ + with NamedTemporaryFile(dir=self.empty_dir, suffix=".py"): + dagbag = models.DagBag( + dag_folder=self.empty_dir, include_examples=False, safe_mode=True) + self.assertEqual(len(dagbag.dagbag_stats), 0) + + def test_safe_mode_disabled(self): + """With safe mode disabled, an empty python file should be discovered. + """ + with NamedTemporaryFile(dir=self.empty_dir, suffix=".py") as fp: + dagbag = models.DagBag( + dag_folder=self.empty_dir, include_examples=False, safe_mode=False) + self.assertEqual(len(dagbag.dagbag_stats), 1) + self.assertEqual( + dagbag.dagbag_stats[0].file, + "/{}".format(os.path.basename(fp.name))) + def test_process_file_that_contains_multi_bytes_char(self): """ test that we're able to parse file that contains multi-byte char From bfa81b53597907ed58b2e01a69ba9fd52ce4a7b9 Mon Sep 17 00:00:00 2001 From: diederikwp <46847987+diederikwp@users.noreply.github.com> Date: Mon, 25 Feb 2019 09:14:05 +0100 Subject: [PATCH 0137/1104] [AIRFLOW-3945] Stop inserting row when permission views unchanged (#4764) - Stop inserting a row where only the id is not NULL in table ab_permission_view_role when there are no permission views to update. 
- Add a test for above issue --- airflow/www/security.py | 3 ++- tests/www/test_security.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/airflow/www/security.py b/airflow/www/security.py index 2da1d70542b04..8577a1f58d4e2 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -428,7 +428,8 @@ def merge_pv(perm, view_menu): update_perm_views.append({'permission_view_id': perm_view_id, 'role_id': role.id}) - self.get_session.execute(ab_perm_view_role.insert(), update_perm_views) + if update_perm_views: + self.get_session.execute(ab_perm_view_role.insert(), update_perm_views) self.get_session.commit() def update_admin_perm_view(self): diff --git a/tests/www/test_security.py b/tests/www/test_security.py index 93ebb22956ff2..909cfa9c81305 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -26,6 +26,7 @@ from flask import Flask from flask_appbuilder import AppBuilder, SQLA, Model, has_access, expose from flask_appbuilder.models.sqla.interface import SQLAInterface +from flask_appbuilder.security.sqla import models as sqla_models from flask_appbuilder.views import ModelView, BaseView from sqlalchemy import Column, Integer, String, Date, Float @@ -261,6 +262,15 @@ def test_access_control_stale_perms_are_revoked(self): dag_id='access_control_test', ) + def test_no_additional_dag_permission_views_created(self): + ab_perm_view_role = sqla_models.assoc_permissionview_role + + self.security_manager.sync_roles() + num_pv_before = self.db.session().query(ab_perm_view_role).count() + self.security_manager.sync_roles() + num_pv_after = self.db.session().query(ab_perm_view_role).count() + self.assertEqual(num_pv_before, num_pv_after) + def expect_user_is_in_role(self, user, rolename): self.security_manager.init_role(rolename, [], []) role = self.security_manager.find_role(rolename) From 74b22337b45a1eb25585d52e35694e6b0eb81f03 Mon Sep 17 00:00:00 2001 From: Pol Osei Date: Mon, 25 Feb 2019 13:27:43 +0200 Subject: 
[PATCH 0138/1104] [AIRFLOW-XXX] Add 4G Capital to list of Airflow users. (#4771) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 7ceb9b8a33b96..0798bd1568c54 100644 --- a/README.md +++ b/README.md @@ -105,6 +105,7 @@ if you may. Currently **officially** using Airflow: +1. [4G Capital](http://www.4g-capital.com/) [[@posei](https://github.com/posei)] 1. [6play](https://www.6play.fr) [[@lemourA](https://github.com/lemoura), [@achaussende](https://github.com/achaussende), [@d-nguyen](https://github.com/d-nguyen), [@julien-gm](https://github.com/julien-gm)] 1. [8fit](https://8fit.com/) [[@nicor88](https://github.com/nicor88), [@frnzska](https://github.com/frnzska)] 1. [90 Seconds](https://90seconds.tv/) [[@aaronmak](https://github.com/aaronmak)] From bb91246cfc6f7905c84aea4a44c11aa057b8f42c Mon Sep 17 00:00:00 2001 From: Xiaodong Date: Tue, 26 Feb 2019 01:50:00 +0800 Subject: [PATCH 0139/1104] [AIRFLOW-3947] Flash msg for no DAG-level access error (#4767) * [AIRFLOW-3947] Flash msg for no DAG-level access error It will show and remind user when a user clicks on a DAG that he/she doesn't have can_dag_read or can_dag_edit permissions. * Change the flash msg contents --- airflow/www/decorators.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/airflow/www/decorators.py b/airflow/www/decorators.py index 889a0b1b33b2f..1db2dfeb363b6 100644 --- a/airflow/www/decorators.py +++ b/airflow/www/decorators.py @@ -21,7 +21,7 @@ import functools import pendulum from io import BytesIO as IO -from flask import after_this_request, redirect, request, url_for, g +from flask import after_this_request, flash, redirect, request, url_for, g from airflow.models.log import Log from airflow.utils.db import create_session @@ -120,6 +120,7 @@ def wrapper(self, *args, **kwargs): dag_id)))): return f(self, *args, **kwargs) else: + flash("Access is Denied", "danger") return redirect(url_for(self.appbuilder.sm.auth_view. 
__class__.__name__ + ".login")) return wrapper From c552996483f5aab5e31793317b26aba62c13332e Mon Sep 17 00:00:00 2001 From: Joshua Carp Date: Mon, 25 Feb 2019 18:07:34 -0500 Subject: [PATCH 0140/1104] [AIRFLOW-3929] Use anchor tags for modal links on dag detail pages. (#4742) --- airflow/www/templates/airflow/dag.html | 307 ++++++++++++------------- 1 file changed, 142 insertions(+), 165 deletions(-) diff --git a/airflow/www/templates/airflow/dag.html b/airflow/www/templates/airflow/dag.html index 56b22ffd721d3..842ffbd0c8fcf 100644 --- a/airflow/www/templates/airflow/dag.html +++ b/airflow/www/templates/airflow/dag.html @@ -109,9 +109,9 @@

  • - - Delete + onclick="return confirmDeleteDag('{{ dag.safe_dag_id }}')"> + + Delete
  • @@ -135,23 +135,23 @@