diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d4d0e78fb66d0..e1510406b468d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -314,10 +314,24 @@ jobs: run: ./scripts/ci/docs/ci_docs.sh --docs-only - name: "Upload documentation" uses: actions/upload-artifact@v2 - if: always() + if: always() && github.event_name == 'pull_request' with: name: airflow-documentation path: "./files/documentation" + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + if: > + github.ref == 'refs/heads/master' && github.repository == 'apache/airflow' && + github.event_name == 'push' + with: + aws-access-key-id: ${{ secrets.DOCS_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.DOCS_AWS_SECRET_ACCESS_KEY }} + aws-region: eu-central-1 + - name: "Upload documentation to AWS S3" + if: > + github.ref == 'refs/heads/master' && github.repository == 'apache/airflow' && + github.event_name == 'push' + run: aws s3 sync ./files/documentation s3://apache-airflow-docs docs-spell-check: timeout-minutes: 30 diff --git a/.gitignore b/.gitignore index aac0017d5a67c..d91331a502ae5 100644 --- a/.gitignore +++ b/.gitignore @@ -88,6 +88,8 @@ instance/ # Sphinx documentation docs/_build/ docs/_api/ +docs/*/_api/ +docs/_doctrees # PyBuilder target/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ecb8f1cb9f0cc..a08435c72302d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -457,6 +457,7 @@ repos: entry: ./scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py language: python require_serial: true + files: provider.yaml$ additional_dependencies: ['PyYAML==5.3.1', 'jsonschema==3.2.0', 'tabulate==0.8.7'] - id: mermaid name: Generate mermaid images diff --git a/CI.rst b/CI.rst index 0825b04397d59..fe200304e2230 100644 --- a/CI.rst +++ b/CI.rst @@ -695,9 +695,22 @@ We also have a script that can help to clean-up the old artifacts: CodeQL scan ----------- -The CodeQL security scan uses 
GitHub security scan framework to scan our code for security violations. +The `CodeQL `_ security scan uses GitHub security scan framework to scan our code for security violations. It is run for JavaScript and python code. +Publishing documentation +------------------------ + +Documentation from the ``master`` branch is automatically published on Amazon S3. + +To make this possible, Github Action has secrets set up with credentials +for an Amazon Web Service account - ``DOCS_AWS_ACCESS_KEY_ID`` and ``DOCS_AWS_SECRET_ACCESS_KEY``. + +This account has permission to write/list/put objects to bucket ``apache-airflow-docs``. This bucket has public access configured, which means it is accessible through the website endpoint. For more information, see: `Hosting a static website on Amazon S3 + `_ + +Website endpoint: http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com/ + Naming conventions for stored images ==================================== diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a403ca2842450..92d7d3a796a9a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -250,7 +250,7 @@ Step 4: Prepare PR For example, to address this example issue, do the following: - * Read about `email configuration in Airflow `__. + * Read about `email configuration in Airflow `__. * Find the class you should modify. For the example GitHub issue, this is `email.py `__. @@ -713,47 +713,9 @@ jobs for each python version. Documentation ============= -The latest API documentation (for the master branch) is usually available -`here `__. +Documentation for ``apache-airflow`` package and other packages that are closely related to it ie. providers packages are in ``/docs/`` directory. For detailed information on documentation development, see: `docs/README.md `_ -To generate a local version you can use ``_. 
- -The documentation build consists of verifying consistency of documentation and two steps: - -* spell checking -* building documentation - -You can only run one of the steps via ``--spellcheck-only`` or ``--docs-only``. - -.. code-block:: bash - - ./breeze build-docs - -or just to run spell-check - -.. code-block:: bash - - ./breeze build-docs -- --spellcheck-only - -or just to run documentation building - -.. code-block:: bash - - ./breeze build-docs -- --docs-only - -Also documentation is available as downloadable artifact in GitHub Actions after the CI builds your PR. - -**Known issues:** - -If you are creating a new directory for new integration in the ``airflow.providers`` package, -you should also update the ``docs/autoapi_templates/index.rst`` file. - -If you are creating new ``hooks``, ``sensors``, ``operators`` directory in -the ``airflow.providers`` package, you should also update -the ``docs/operators-and-hooks-ref.rst`` file. - -If you are creating ``example_dags`` directory, you need to create ``example_dags/__init__.py`` with Apache -license or copy another ``__init__.py`` file that contains the necessary license. 
+For Helm Chart documentation, see: `/chart/README.md <../chart/README.md>`__ Static code checks ================== diff --git a/README.md b/README.md index 24680346a5c73..c49ad3f201108 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,6 @@ [![PyPI version](https://badge.fury.io/py/apache-airflow.svg)](https://badge.fury.io/py/apache-airflow) [![GitHub Build](/~https://github.com/apache/airflow/workflows/CI%20Build/badge.svg)](/~https://github.com/apache/airflow/actions) [![Coverage Status](https://img.shields.io/codecov/c/github/apache/airflow/master.svg)](https://codecov.io/github/apache/airflow?branch=master) -[![Documentation Status](https://readthedocs.org/projects/airflow/badge/?version=latest)](https://airflow.readthedocs.io/en/latest/?badge=latest) [![License](http://img.shields.io/:license-Apache%202-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/apache-airflow.svg)](https://pypi.org/project/apache-airflow/) [![Docker Pulls](https://img.shields.io/docker/pulls/apache/airflow.svg)](https://hub.docker.com/r/apache/airflow) @@ -135,7 +134,7 @@ pip install apache-airflow[postgres,google]==1.10.12 \ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.7.txt" ``` -For information on installing backport providers check https://airflow.readthedocs.io/en/latest/backport-providers.html. +For information on installing backport providers check [/docs/backport-providers.rst](/docs/backport-providers.rst). ## Official source code diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index ecaf3d2d793d1..f9bbb431ab955 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -171,7 +171,7 @@ info: The default is to deny all requests. For details on configuring the authentication, see - [API Authorization](https://airflow.readthedocs.io/en/latest/security/api.html).
+ [API Authorization](https://airflow.apache.org/docs/stable/security/api.html). # Errors @@ -1880,7 +1880,7 @@ components: DAG details. For details see: - (airflow.models.DAG)[https://airflow.readthedocs.io/en/stable/_api/airflow/models/index.html#airflow.models.DAG] + (airflow.models.DAG)[https://airflow.apache.org/docs/stable/_api/airflow/models/index.html#airflow.models.DAG] allOf: - $ref: '#/components/schemas/DAG' - type: object diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json index 219a5e5cce476..9e2933d9cb441 100644 --- a/airflow/provider.yaml.schema.json +++ b/airflow/provider.yaml.schema.json @@ -6,6 +6,10 @@ "description": "Package name available under which the package is available in the PyPI repository.", "type": "string" }, + "name": { + "description": "Provider name", + "type": "string" + }, "description": { "description": "Information about the package in RST format", "type": "string" @@ -167,6 +171,7 @@ }, "additionalProperties": false, "required": [ + "name", "package-name", "description", "versions" diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/airflow/providers/amazon/aws/hooks/base_aws.py index d7dff34d46439..d21862244282f 100644 --- a/airflow/providers/amazon/aws/hooks/base_aws.py +++ b/airflow/providers/amazon/aws/hooks/base_aws.py @@ -21,7 +21,7 @@ .. seealso:: For more information on how to use this hook, take a look at the guide: - :ref:`howto/connection:AWSHook` + :ref:`apache-airflow:howto/connection:AWSHook` """ import configparser diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py index 31aa6b446fda3..e0554fb2e5f6a 100644 --- a/airflow/providers/amazon/aws/operators/datasync.py +++ b/airflow/providers/amazon/aws/operators/datasync.py @@ -36,7 +36,7 @@ class AWSDataSyncOperator(BaseOperator): .. 
seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:AWSDataSyncOperator` + :ref:`apache-airflow:howto/operator:AWSDataSyncOperator` .. note:: There may be 0, 1, or many existing DataSync Tasks defined in your AWS environment. The default behavior is to create a new Task if there are 0, or diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py index 9584a31d77271..427b7f5f3f52b 100644 --- a/airflow/providers/amazon/aws/operators/ecs.py +++ b/airflow/providers/amazon/aws/operators/ecs.py @@ -76,7 +76,7 @@ class ECSOperator(BaseOperator): # pylint: disable=too-many-instance-attributes .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:ECSOperator` + :ref:`apache-airflow:howto/operator:ECSOperator` :param task_definition: the task definition name on Elastic Container Service :type task_definition: str diff --git a/airflow/providers/amazon/aws/operators/glacier.py b/airflow/providers/amazon/aws/operators/glacier.py index c88d6526213ad..eecb522ee7019 100644 --- a/airflow/providers/amazon/aws/operators/glacier.py +++ b/airflow/providers/amazon/aws/operators/glacier.py @@ -26,7 +26,7 @@ class GlacierCreateJobOperator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:GlacierCreateJobOperator` + :ref:`apache-airflow:howto/operator:GlacierCreateJobOperator` :param aws_conn_id: The reference to the AWS connection details :type aws_conn_id: str diff --git a/airflow/providers/amazon/aws/sensors/glacier.py b/airflow/providers/amazon/aws/sensors/glacier.py index 35548bf31627b..744c2df2b3f89 100644 --- a/airflow/providers/amazon/aws/sensors/glacier.py +++ b/airflow/providers/amazon/aws/sensors/glacier.py @@ -35,6 +35,10 @@ class GlacierJobOperationSensor(BaseSensorOperator): """ Glacier sensor for checking job state. 
This operator runs only in reschedule mode. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`apache-airflow:howto/operator:GlacierJobOperationSensor` + :param aws_conn_id: The reference to the AWS connection details :type aws_conn_id: str :param vault_name: name of Glacier vault on which job is executed diff --git a/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py b/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py index 5ee9802f1a87e..04ff1c084ab71 100644 --- a/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +++ b/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py @@ -34,7 +34,7 @@ class GlacierToGCSOperator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:GlacierToGCSOperator` + :ref:`apache-airflow:howto/operator:GlacierToGCSOperator` :param aws_conn_id: The reference to the AWS connection details :type aws_conn_id: str diff --git a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py index 015e7342c79ef..2b30d7a5c7256 100644 --- a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py @@ -28,7 +28,7 @@ class ImapAttachmentToS3Operator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:ImapAttachmentToS3Operator` + :ref:`apache-airflow:howto/operator:ImapAttachmentToS3Operator` :param imap_attachment_name: The file name of the mail attachment that you want to transfer. 
:type imap_attachment_name: str diff --git a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py index 1da74ba889442..5b352a74d795e 100644 --- a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py +++ b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py @@ -29,7 +29,7 @@ class S3ToRedshiftOperator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:S3ToRedshiftOperator` + :ref:`apache-airflow:howto/operator:S3ToRedshiftOperator` :param schema: reference to a specific schema in redshift database :type schema: str diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml index bc852af008f4e..53b14e3fa951f 100644 --- a/airflow/providers/amazon/provider.yaml +++ b/airflow/providers/amazon/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-amazon +name: Amazon description: | Amazon integration (including `Amazon Web Services (AWS) `__). diff --git a/airflow/providers/apache/cassandra/provider.yaml b/airflow/providers/apache/cassandra/provider.yaml index a17345deb68e4..77402c05a2b2d 100644 --- a/airflow/providers/apache/cassandra/provider.yaml +++ b/airflow/providers/apache/cassandra/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-cassandra +name: Apache Cassandra description: | `Apache Cassandra `__. diff --git a/airflow/providers/apache/cassandra/sensors/record.py b/airflow/providers/apache/cassandra/sensors/record.py index bc61b2953b7a9..8d28e6932e781 100644 --- a/airflow/providers/apache/cassandra/sensors/record.py +++ b/airflow/providers/apache/cassandra/sensors/record.py @@ -33,7 +33,7 @@ class CassandraRecordSensor(BaseSensorOperator): .. 
seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:CassandraRecordSensor` + :ref:`apache-airflow:howto/operator:CassandraRecordSensor` For example, if you want to wait for a record that has values 'v1' and 'v2' for each primary keys 'p1' and 'p2' to be populated in keyspace 'k' and table 't', diff --git a/airflow/providers/apache/cassandra/sensors/table.py b/airflow/providers/apache/cassandra/sensors/table.py index 64129d780a98d..d64b32e817e40 100644 --- a/airflow/providers/apache/cassandra/sensors/table.py +++ b/airflow/providers/apache/cassandra/sensors/table.py @@ -34,7 +34,7 @@ class CassandraTableSensor(BaseSensorOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:CassandraTableSensor` + :ref:`apache-airflow:howto/operator:CassandraTableSensor` For example, if you want to wait for a table called 't' to be created diff --git a/airflow/providers/apache/druid/provider.yaml b/airflow/providers/apache/druid/provider.yaml index b9c2783a24f3a..57bd609f295db 100644 --- a/airflow/providers/apache/druid/provider.yaml +++ b/airflow/providers/apache/druid/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-druid +name: Apache Druid description: | `Apache Druid `__. diff --git a/airflow/providers/apache/hdfs/provider.yaml b/airflow/providers/apache/hdfs/provider.yaml index b764f191360d2..c6f46de3a2e7b 100644 --- a/airflow/providers/apache/hdfs/provider.yaml +++ b/airflow/providers/apache/hdfs/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-hdfs +name: Apache HDFS description: | `Hadoop Distributed File System (HDFS) `__ and `WebHDFS `__. 
diff --git a/airflow/providers/apache/hive/provider.yaml b/airflow/providers/apache/hive/provider.yaml index 9234075652647..98b94829d86bc 100644 --- a/airflow/providers/apache/hive/provider.yaml +++ b/airflow/providers/apache/hive/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-hive +name: Apache Hive description: | `Apache Hive `__ diff --git a/airflow/providers/apache/kylin/provider.yaml b/airflow/providers/apache/kylin/provider.yaml index 80fb84e96d688..008e2a1299155 100644 --- a/airflow/providers/apache/kylin/provider.yaml +++ b/airflow/providers/apache/kylin/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-kylin +name: Apache Kylin description: | `Apache Kylin `__ diff --git a/airflow/providers/apache/livy/provider.yaml b/airflow/providers/apache/livy/provider.yaml index 3a4088b408762..02757f06e4efa 100644 --- a/airflow/providers/apache/livy/provider.yaml +++ b/airflow/providers/apache/livy/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-livy +name: Apache Livy description: | `Apache Livy `__ diff --git a/airflow/providers/apache/pig/provider.yaml b/airflow/providers/apache/pig/provider.yaml index aec15b182a4b6..f1754c72e9962 100644 --- a/airflow/providers/apache/pig/provider.yaml +++ b/airflow/providers/apache/pig/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-pig +name: Apache Pig description: | `Apache Pig `__ diff --git a/airflow/providers/apache/pinot/provider.yaml b/airflow/providers/apache/pinot/provider.yaml index a3e1e9075fea2..67c38921aab2b 100644 --- a/airflow/providers/apache/pinot/provider.yaml +++ b/airflow/providers/apache/pinot/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-pinot +name: Apache Pinot description: | `Apache Pinot `__ diff --git a/airflow/providers/apache/spark/operators/spark_jdbc.py b/airflow/providers/apache/spark/operators/spark_jdbc.py index
63e42ca3fac12..b154f8d82db7b 100644 --- a/airflow/providers/apache/spark/operators/spark_jdbc.py +++ b/airflow/providers/apache/spark/operators/spark_jdbc.py @@ -33,7 +33,7 @@ class SparkJDBCOperator(SparkSubmitOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:SparkJDBCOperator` + :ref:`apache-airflow:howto/operator:SparkJDBCOperator` :param spark_app_name: Name of the job (default airflow-spark-jdbc) :type spark_app_name: str diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py index 69254092a2c9b..90f641c0ba01e 100644 --- a/airflow/providers/apache/spark/operators/spark_sql.py +++ b/airflow/providers/apache/spark/operators/spark_sql.py @@ -29,7 +29,7 @@ class SparkSqlOperator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:SparkSqlOperator` + :ref:`apache-airflow:howto/operator:SparkSqlOperator` :param sql: The SQL query to execute. (templated) :type sql: str diff --git a/airflow/providers/apache/spark/operators/spark_submit.py b/airflow/providers/apache/spark/operators/spark_submit.py index 224b53585195d..373797de2383f 100644 --- a/airflow/providers/apache/spark/operators/spark_submit.py +++ b/airflow/providers/apache/spark/operators/spark_submit.py @@ -33,7 +33,7 @@ class SparkSubmitOperator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:SparkSubmitOperator` + :ref:`apache-airflow:howto/operator:SparkSubmitOperator` :param application: The application that submitted as a job, either jar or py file. 
(templated) :type application: str diff --git a/airflow/providers/apache/spark/provider.yaml b/airflow/providers/apache/spark/provider.yaml index 18a4ce8a7d5eb..51f9e8a370387 100644 --- a/airflow/providers/apache/spark/provider.yaml +++ b/airflow/providers/apache/spark/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-spark +name: Apache Spark description: | `Apache Spark `__ diff --git a/airflow/providers/apache/sqoop/provider.yaml b/airflow/providers/apache/sqoop/provider.yaml index 840406d549436..cea443ded8f80 100644 --- a/airflow/providers/apache/sqoop/provider.yaml +++ b/airflow/providers/apache/sqoop/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-apache-sqoop +name: Apache Sqoop description: | `Apache Sqoop `__ diff --git a/airflow/providers/celery/provider.yaml b/airflow/providers/celery/provider.yaml index 8d3a6c0c48418..6bebb795ab5f9 100644 --- a/airflow/providers/celery/provider.yaml +++ b/airflow/providers/celery/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-celery +name: Celery description: | `Celery `__ diff --git a/airflow/providers/cloudant/provider.yaml b/airflow/providers/cloudant/provider.yaml index 56d8e2d4f2f04..4986c439dc133 100644 --- a/airflow/providers/cloudant/provider.yaml +++ b/airflow/providers/cloudant/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-cloudant +name: IBM Cloudant description: | `IBM Cloudant `__ diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py index 7c203a8d87c60..f7088d26d6e64 100644 --- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py +++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py @@ -48,7 +48,7 @@ class KubernetesHook(BaseHook): .. 
seealso:: For more information about Kubernetes connection: - :ref:`howto/connection:kubernetes` + :ref:`apache-airflow:howto/connection:kubernetes` :param conn_id: the connection to Kubernetes cluster :type conn_id: str diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index 5e8832e283635..f4c64f326f4b8 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -52,7 +52,7 @@ class KubernetesPodOperator(BaseOperator): # pylint: disable=too-many-instance- .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:KubernetesPodOperator` + :ref:`apache-airflow:howto/operator:KubernetesPodOperator` .. note:: If you use `Google Kubernetes Engine `__ diff --git a/airflow/providers/cncf/kubernetes/provider.yaml b/airflow/providers/cncf/kubernetes/provider.yaml index bc6d01dd35408..c66fd7134f7f2 100644 --- a/airflow/providers/cncf/kubernetes/provider.yaml +++ b/airflow/providers/cncf/kubernetes/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-cncf-kubernetes +name: Kubernetes description: | `Kubernetes `__ diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index fe753bc0921c9..fc209d83985b2 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -150,6 +150,10 @@ class DatabricksSubmitRunOperator(BaseOperator): - ``run_name`` - ``timeout_seconds`` + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`apache-airflow:howto/operator:DatabricksSubmitRunOperator` + :param json: A JSON object containing API parameters which will be passed directly to the ``api/2.0/jobs/runs/submit`` endpoint. The other named parameters (i.e. 
``spark_jar_task``, ``notebook_task``..) to this operator will diff --git a/airflow/providers/databricks/provider.yaml b/airflow/providers/databricks/provider.yaml index 00bd293a5f634..d29826c8582a5 100644 --- a/airflow/providers/databricks/provider.yaml +++ b/airflow/providers/databricks/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-databricks +name: Databricks description: | `Databricks `__ diff --git a/airflow/providers/datadog/provider.yaml b/airflow/providers/datadog/provider.yaml index 78262444bd815..148e647a7b478 100644 --- a/airflow/providers/datadog/provider.yaml +++ b/airflow/providers/datadog/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-datadog +name: Datadog description: | `Datadog `__ diff --git a/airflow/providers/dingding/provider.yaml b/airflow/providers/dingding/provider.yaml index 8fc729c0ede2e..3c21b9701c34a 100644 --- a/airflow/providers/dingding/provider.yaml +++ b/airflow/providers/dingding/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-dingding +name: Dingding description: | `Dingding `__ diff --git a/airflow/providers/discord/provider.yaml b/airflow/providers/discord/provider.yaml index 4e72a9c2aaa10..217ce48eb7306 100644 --- a/airflow/providers/discord/provider.yaml +++ b/airflow/providers/discord/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-discord +name: Discord description: | `Discord `__ diff --git a/airflow/providers/docker/provider.yaml b/airflow/providers/docker/provider.yaml index 7549bf068786d..a02202c369f57 100644 --- a/airflow/providers/docker/provider.yaml +++ b/airflow/providers/docker/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-docker +name: Docker description: | `Docker `__ diff --git a/airflow/providers/elasticsearch/provider.yaml b/airflow/providers/elasticsearch/provider.yaml index a3b49944293aa..079d7244d36be 100644 --- a/airflow/providers/elasticsearch/provider.yaml +++ 
b/airflow/providers/elasticsearch/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-elasticsearch +name: Elasticsearch description: | `Elasticsearch `__ diff --git a/airflow/providers/exasol/provider.yaml b/airflow/providers/exasol/provider.yaml index fbec0f3715a03..ae13837ff062e 100644 --- a/airflow/providers/exasol/provider.yaml +++ b/airflow/providers/exasol/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-exasol +name: Exasol description: | `Exasol `__ diff --git a/airflow/providers/facebook/provider.yaml b/airflow/providers/facebook/provider.yaml index 067e35fdc2ad5..c0f9dfb16111b 100644 --- a/airflow/providers/facebook/provider.yaml +++ b/airflow/providers/facebook/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-facebook +name: Facebook description: | `Facebook Ads `__ diff --git a/airflow/providers/ftp/provider.yaml b/airflow/providers/ftp/provider.yaml index 2dad415a286bd..062f2994ad1a3 100644 --- a/airflow/providers/ftp/provider.yaml +++ b/airflow/providers/ftp/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-ftp +name: File Transfer Protocol (FTP) description: | `File Transfer Protocol (FTP) `__ diff --git a/airflow/providers/google/__init__.py b/airflow/providers/google/__init__.py index 9d7b5d8ffb82f..dc2ff2a692e50 100644 --- a/airflow/providers/google/__init__.py +++ b/airflow/providers/google/__init__.py @@ -14,9 +14,12 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +import importlib import logging -from airflow.configuration import conf +# HACK: +# Sphinx-autoapi doesn't like imports to excluded packages in the main module. 
+conf = importlib.import_module('airflow.configuration').conf PROVIDERS_GOOGLE_VERBOSE_LOGGING: bool = conf.getboolean( 'providers_google', 'VERBOSE_LOGGING', fallback=False diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py index 5600a29a8696a..cdee1b20a3598 100644 --- a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py @@ -373,7 +373,7 @@ def list_transfer_operations(self, request_filter: Optional[dict] = None, **kwar * project_id is optional if you have a project id defined in the connection - See: :ref:`howto/connection:gcp` + See: :doc:`/connections/gcp` :type request_filter: dict :return: transfer operation diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py b/airflow/providers/google/cloud/operators/cloud_sql.py index 7aef1748c9e59..12dee35f32e9e 100644 --- a/airflow/providers/google/cloud/operators/cloud_sql.py +++ b/airflow/providers/google/cloud/operators/cloud_sql.py @@ -1047,7 +1047,7 @@ class CloudSQLExecuteQueryOperator(BaseOperator): :param gcp_cloudsql_conn_id: The connection ID used to connect to Google Cloud SQL its schema should be gcpcloudsql://. See :class:`~airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook` for - details on how to define gcpcloudsql:// connection. + details on how to define ``gcpcloudsql://`` connection. :type gcp_cloudsql_conn_id: str """ diff --git a/airflow/providers/google/cloud/utils/field_validator.py b/airflow/providers/google/cloud/utils/field_validator.py index 4a44b3566bbb1..876edc412715f 100644 --- a/airflow/providers/google/cloud/utils/field_validator.py +++ b/airflow/providers/google/cloud/utils/field_validator.py @@ -18,7 +18,7 @@ """Validator for body fields sent via Google Cloud API. 
The validator performs validation of the body (being dictionary of fields) that -is sent in the API request to Google Cloud (via googleclient API usually). +is sent in the API request to Google Cloud (via ``googleclient`` API usually). Context ------- diff --git a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py index 88fe3725db22a..bd292c9f4f3ea 100644 --- a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py +++ b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py @@ -66,15 +66,18 @@ def create_evaluate_ops( # pylint: disable=too-many-arguments and for Cloud Dataflow, https://cloud.google.com/dataflow/docs/ It returns three chained operators for prediction, summary, and validation, - named as -prediction, -summary, and -validation, + named as ``-prediction``, ``-summary``, and ``-validation``, respectively. - ( should contain only alphanumeric characters or hyphen.) + (```` should contain only alphanumeric characters or hyphen.) The upstream and downstream can be set accordingly like: - pred, _, val = create_evaluate_ops(...) - pred.set_upstream(upstream_op) - ... - downstream_op.set_upstream(val) + + .. code-block:: python + + pred, _, val = create_evaluate_ops(...) + pred.set_upstream(upstream_op) + ... + downstream_op.set_upstream(val) Callers will provide two python callables, metric_fn and validate_fn, in order to customize the evaluation behavior as they wish. @@ -165,7 +168,7 @@ def validate_err_and_count(summary): :type dataflow_options: dictionary :param model_uri: GCS path of the model exported by Tensorflow using - tensorflow.estimator.export_savedmodel(). It cannot be used with + ``tensorflow.estimator.export_savedmodel()``. It cannot be used with model_name or version_name below. See MLEngineBatchPredictionOperator for more detail. 
:type model_uri: str diff --git a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py index 4e197b0de82cf..d95d8a6a785c1 100644 --- a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py +++ b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py @@ -26,12 +26,12 @@ - ``--prediction_path``: The GCS folder that contains BatchPrediction results, containing - prediction.results-NNNNN-of-NNNNN files in the json format. + ``prediction.results-NNNNN-of-NNNNN`` files in the json format. Output will be also stored in this folder, as 'prediction.summary.json'. - ``--metric_fn_encoded``: An encoded function that calculates and returns a tuple of metric(s) for a given instance (as a dictionary). It should be encoded - via base64.b64encode(dill.dumps(fn, recurse=True)). + via ``base64.b64encode(dill.dumps(fn, recurse=True))``. - ``--metric_keys``: A comma-separated key(s) of the aggregated metric(s) in the summary output. 
The order and the size of the keys must match to the output diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml index 51d2abdff3273..d96c58058d404 100644 --- a/airflow/providers/google/provider.yaml +++ b/airflow/providers/google/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-google +name: Google description: | Google services including: @@ -33,52 +34,52 @@ integrations: - integration-name: Google Analytics360 external-doc-url: https://analytics.google.com/ how-to-guide: - - /docs/howto/operator/google/marketing_platform/analytics.rst + - /docs/apache-airflow-providers-google/operators/marketing_platform/analytics.rst tags: [gmp] - integration-name: Google Ads external-doc-url: https://ads.google.com/ how-to-guide: - - /docs/howto/operator/google/ads.rst + - /docs/apache-airflow-providers-google/operators/ads.rst tags: [gmp] - integration-name: Google AutoML external-doc-url: https://cloud.google.com/automl/ how-to-guide: - - /docs/howto/operator/google/cloud/automl.rst + - /docs/apache-airflow-providers-google/operators/cloud/automl.rst tags: [gcp] - integration-name: Google BigQuery Data Transfer Service external-doc-url: https://cloud.google.com/bigquery/transfer/ how-to-guide: - - /docs/howto/operator/google/cloud/bigquery_dts.rst + - /docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst tags: [gcp] - integration-name: Google BigQuery how-to-guide: - - /docs/howto/operator/google/cloud/bigquery.rst + - /docs/apache-airflow-providers-google/operators/cloud/bigquery.rst external-doc-url: https://cloud.google.com/bigquery/ tags: [gcp] - integration-name: Google Bigtable how-to-guide: - - /docs/howto/operator/google/cloud/bigtable.rst + - /docs/apache-airflow-providers-google/operators/cloud/bigtable.rst external-doc-url: https://cloud.google.com/bigtable/ tags: [gcp] - integration-name: Google Cloud Build external-doc-url: https://cloud.google.com/cloud-build/ how-to-guide: - - 
/docs/howto/operator/google/cloud/cloud_build.rst + - /docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst tags: [gcp] - integration-name: Google Cloud Data Loss Prevention (DLP) external-doc-url: https://cloud.google.com/dlp/ how-to-guide: - - /docs/howto/operator/google/cloud/dlp.rst + - /docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst tags: [gcp] - integration-name: Google Cloud Firestore external-doc-url: https://firebase.google.com/docs/firestore how-to-guide: - - /docs/howto/operator/google/firebase/firestore.rst + - /docs/apache-airflow-providers-google/operators/firebase/firestore.rst tags: [gcp] - integration-name: Google Cloud Functions external-doc-url: https://cloud.google.com/functions/ how-to-guide: - - /docs/howto/operator/google/cloud/functions.rst + - /docs/apache-airflow-providers-google/operators/cloud/functions.rst tags: [gcp] - integration-name: Google Cloud Key Management Service (KMS) external-doc-url: https://cloud.google.com/kms/ @@ -86,13 +87,13 @@ integrations: - integration-name: Google Cloud Life Sciences external-doc-url: https://cloud.google.com/life-sciences/ how-to-guide: - - /docs/howto/operator/google/cloud/life_sciences.rst + - /docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst tags: [gcp] - integration-name: Google Cloud Memorystore external-doc-url: https://cloud.google.com/memorystore/ how-to-guide: - - /docs/howto/operator/google/cloud/cloud_memorystore.rst - - /docs/howto/operator/google/cloud/cloud_memorystore_memcached.rst + - /docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst + - /docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst tags: [gcp] - integration-name: Google Cloud OS Login external-doc-url: https://cloud.google.com/compute/docs/oslogin/ @@ -100,7 +101,7 @@ integrations: - integration-name: Google Cloud Pub/Sub external-doc-url: https://cloud.google.com/pubsub/ how-to-guide: - - 
/docs/howto/operator/google/cloud/pubsub.rst + - /docs/apache-airflow-providers-google/operators/cloud/pubsub.rst tags: [gcp] - integration-name: Google Cloud Secret Manager external-doc-url: https://cloud.google.com/secret-manager/ @@ -108,28 +109,28 @@ integrations: - integration-name: Google Cloud Spanner external-doc-url: https://cloud.google.com/spanner/ how-to-guide: - - /docs/howto/operator/google/cloud/spanner.rst + - /docs/apache-airflow-providers-google/operators/cloud/spanner.rst tags: [gcp] - integration-name: Google Cloud Speech-to-Text external-doc-url: https://cloud.google.com/speech-to-text/ how-to-guide: - - /docs/howto/operator/google/cloud/speech_to_text.rst - - /docs/howto/operator/google/cloud/translate_speech.rst + - /docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst + - /docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst tags: [gcp] - integration-name: Google Cloud SQL external-doc-url: https://cloud.google.com/sql/ how-to-guide: - - /docs/howto/operator/google/cloud/cloud_sql.rst + - /docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst tags: [gcp] - integration-name: Google Cloud Stackdriver external-doc-url: https://cloud.google.com/stackdriver how-to-guide: - - /docs/howto/operator/google/cloud/stackdriver.rst + - /docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst tags: [gcp] - integration-name: Google Cloud Storage (GCS) external-doc-url: https://cloud.google.com/gcs/ how-to-guide: - - /docs/howto/operator/google/cloud/gcs.rst + - /docs/apache-airflow-providers-google/operators/cloud/gcs.rst tags: [gcp] - integration-name: Google Cloud Tasks external-doc-url: https://cloud.google.com/tasks/ @@ -137,29 +138,29 @@ integrations: - integration-name: Google Cloud Text-to-Speech external-doc-url: https://cloud.google.com/text-to-speech/ how-to-guide: - - /docs/howto/operator/google/cloud/text_to_speech.rst + - 
/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst tags: [gcp] - integration-name: Google Cloud Translation external-doc-url: https://cloud.google.com/translate/ how-to-guide: - - /docs/howto/operator/google/cloud/translate.rst - - /docs/howto/operator/google/cloud/translate_speech.rst + - /docs/apache-airflow-providers-google/operators/cloud/translate.rst + - /docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst tags: [gcp] - integration-name: Google Cloud Video Intelligence external-doc-url: https://cloud.google.com/video_intelligence/ how-to-guide: - - /docs/howto/operator/google/cloud/video_intelligence.rst + - /docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst tags: [gcp] - integration-name: Google Cloud Vision external-doc-url: https://cloud.google.com/vision/ how-to-guide: - - /docs/howto/operator/google/cloud/vision.rst + - /docs/apache-airflow-providers-google/operators/cloud/vision.rst tags: [gcp] - integration-name: Google Compute Engine external-doc-url: https://cloud.google.com/compute/ how-to-guide: - - /docs/howto/operator/google/cloud/compute.rst - - /docs/howto/operator/google/cloud/compute_ssh.rst + - /docs/apache-airflow-providers-google/operators/cloud/compute.rst + - /docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst tags: [gcp] - integration-name: Google Data Proc external-doc-url: https://cloud.yandex.com/services/data-proc @@ -167,7 +168,7 @@ integrations: - integration-name: Google Data Catalog external-doc-url: https://cloud.google.com/data-catalog how-to-guide: - - /docs/howto/operator/google/cloud/datacatalog.rst + - /docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst tags: [gcp] - integration-name: Google Dataflow external-doc-url: https://cloud.google.com/dataflow/ @@ -175,22 +176,22 @@ integrations: - integration-name: Google Data Fusion external-doc-url: https://cloud.google.com/data-fusion/ how-to-guide: - - 
/docs/howto/operator/google/cloud/datafusion.rst + - /docs/apache-airflow-providers-google/operators/cloud/datafusion.rst tags: [gcp] - integration-name: Google Dataprep external-doc-url: https://cloud.google.com/dataprep/ how-to-guide: - - /docs/howto/operator/google/cloud/dataprep.rst + - /docs/apache-airflow-providers-google/operators/cloud/dataprep.rst tags: [gcp] - integration-name: Google Dataproc external-doc-url: https://cloud.google.com/dataproc/ how-to-guide: - - /docs/howto/operator/google/cloud/dataproc.rst + - /docs/apache-airflow-providers-google/operators/cloud/dataproc.rst tags: [gcp] - integration-name: Google Datastore external-doc-url: https://cloud.google.com/datastore/ how-to-guide: - - /docs/howto/operator/google/cloud/datastore.rst + - /docs/apache-airflow-providers-google/operators/cloud/datastore.rst tags: [gcp] - integration-name: Google Deployment Manager external-doc-url: https://cloud.google.com/deployment-manager/ @@ -201,7 +202,7 @@ integrations: - integration-name: Google Campaign Manager external-doc-url: https://developers.google.com/doubleclick-advertisers how-to-guide: - - /docs/howto/operator/google/marketing_platform/campaign_manager.rst + - /docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst tags: [gcp] - integration-name: Google Cloud external-doc-url: https://cloud.google.com/ @@ -212,7 +213,7 @@ integrations: - integration-name: Google Display&Video 360 external-doc-url: https://marketingplatform.google.com/about/display-video-360/ how-to-guide: - - /docs/howto/operator/google/marketing_platform/display_video.rst + - /docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst tags: [gmp] - integration-name: Google Drive external-doc-url: https://www.google.com/drive/ @@ -220,7 +221,7 @@ integrations: - integration-name: Google Search Ads 360 external-doc-url: https://marketingplatform.google.com/about/search-ads-360/ how-to-guide: - - 
/docs/howto/operator/google/marketing_platform/search_ads.rst + - /docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst tags: [gmp] - integration-name: Google external-doc-url: https://developer.google.com/ @@ -228,27 +229,27 @@ integrations: - integration-name: Google Spreadsheet external-doc-url: https://www.google.com/intl/en/sheets/about/ how-to-guide: - - /docs/howto/operator/google/suite/sheets.rst + - /docs/apache-airflow-providers-google/operators/suite/sheets.rst tags: [google] - integration-name: Google Cloud Storage Transfer Service external-doc-url: https://cloud.google.com/storage/transfer/ how-to-guide: - - /docs/howto/operator/google/cloud/cloud_storage_transfer_service.rst + - /docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst tags: [gcp] - integration-name: Google Kubernetes Engine external-doc-url: https://cloud.google.com/kubernetes_engine/ how-to-guide: - - /docs/howto/operator/google/cloud/kubernetes_engine.rst + - /docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst tags: [gcp] - integration-name: Google Machine Learning Engine external-doc-url: https://cloud.google.com/ai-platform/ how-to-guide: - - /docs/howto/operator/google/cloud/mlengine.rst + - /docs/apache-airflow-providers-google/operators/cloud/mlengine.rst tags: [gcp] - integration-name: Google Cloud Natural Language external-doc-url: https://cloud.google.com/natural-language/ how-to-guide: - - /docs/howto/operator/google/cloud/natural_language.rst + - /docs/apache-airflow-providers-google/operators/cloud/natural_language.rst tags: [gcp] operators: @@ -548,36 +549,36 @@ hooks: transfers: - source-integration-name: Presto target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/presto_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst python-module: 
airflow.providers.google.cloud.transfers.presto_to_gcs - source-integration-name: SQL target-integration-name: Google Cloud Storage (GCS) python-module: airflow.providers.google.cloud.transfers.sql_to_gcs - source-integration-name: Google Cloud Storage (GCS) target-integration-name: Google Drive - how-to-guide: /docs/howto/operator/google/transfer/gcs_to_gdrive.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst python-module: airflow.providers.google.suite.transfers.gcs_to_gdrive - source-integration-name: Microsoft SQL Server (MSSQL) target-integration-name: Google Cloud Storage (GCS) python-module: airflow.providers.google.cloud.transfers.mssql_to_gcs - source-integration-name: Microsoft Azure FileShare target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/azure_fileshare_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs - source-integration-name: Apache Cassandra target-integration-name: Google Cloud Storage (GCS) python-module: airflow.providers.google.cloud.transfers.cassandra_to_gcs - source-integration-name: Google Spreadsheet target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/sheets_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.sheets_to_gcs - source-integration-name: Amazon Simple Storage Service (S3) target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/s3_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.s3_to_gcs - source-integration-name: Google Cloud Storage (GCS) target-integration-name: SSH File Transfer 
Protocol (SFTP) - how-to-guide: /docs/howto/operator/google/transfer/gcs_to_sftp.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst python-module: airflow.providers.google.cloud.transfers.gcs_to_sftp - source-integration-name: PostgreSQL target-integration-name: Google Cloud Storage (GCS) @@ -590,15 +591,15 @@ transfers: python-module: airflow.providers.google.cloud.transfers.gcs_to_bigquery - source-integration-name: Google Cloud Storage (GCS) target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/gcs_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.gcs_to_gcs - source-integration-name: Facebook Ads target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/facebook_ads_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.facebook_ads_to_gcs - source-integration-name: SSH File Transfer Protocol (SFTP) target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/sftp_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.sftp_to_gcs - source-integration-name: Microsoft Azure Data Lake Storage target-integration-name: Google Cloud Storage (GCS) @@ -609,25 +610,25 @@ transfers: - source-integration-name: MySQL target-integration-name: Google Cloud Storage (GCS) python-module: airflow.providers.google.cloud.transfers.mysql_to_gcs - how-to-guide: /docs/howto/operator/google/transfer/mysql_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst - source-integration-name: Google Cloud Storage (GCS) target-integration-name: Google 
Spreadsheet - how-to-guide: /docs/howto/operator/google/transfer/gcs_to_sheets.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst python-module: airflow.providers.google.suite.transfers.gcs_to_sheets - source-integration-name: Local target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/local_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.local_to_gcs - source-integration-name: Google BigQuery target-integration-name: Google Cloud Storage (GCS) python-module: airflow.providers.google.cloud.transfers.bigquery_to_gcs - source-integration-name: Google Cloud Storage (GCS) target-integration-name: Local - how-to-guide: /docs/howto/operator/google/transfer/gcs_to_local.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst python-module: airflow.providers.google.cloud.transfers.gcs_to_local - source-integration-name: Salesforce target-integration-name: Google Cloud Storage (GCS) - how-to-guide: /docs/howto/operator/google/transfer/salesforce_to_gcs.rst + how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst python-module: airflow.providers.google.cloud.transfers.salesforce_to_gcs - source-integration-name: Google Ads target-integration-name: Google Cloud Storage (GCS) diff --git a/airflow/providers/grpc/provider.yaml b/airflow/providers/grpc/provider.yaml index 65465ee0e6752..a607e97b3c2fe 100644 --- a/airflow/providers/grpc/provider.yaml +++ b/airflow/providers/grpc/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-grpc +name: gRPC description: | `gRPC `__ diff --git a/airflow/providers/hashicorp/provider.yaml b/airflow/providers/hashicorp/provider.yaml index 959df236abc82..5eb8a26067e67 100644 --- a/airflow/providers/hashicorp/provider.yaml +++ 
b/airflow/providers/hashicorp/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-hashicorp +name: Hashicorp description: | Hashicorp including `Hashicorp Vault `__ diff --git a/airflow/providers/http/operators/http.py b/airflow/providers/http/operators/http.py index 9e028a4e88ab5..e9f73bf4cb320 100644 --- a/airflow/providers/http/operators/http.py +++ b/airflow/providers/http/operators/http.py @@ -29,7 +29,7 @@ class SimpleHttpOperator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:SimpleHttpOperator` + :ref:`apache-airflow:howto/operator:SimpleHttpOperator` :param http_conn_id: The connection to run the operator against :type http_conn_id: str diff --git a/airflow/providers/http/provider.yaml b/airflow/providers/http/provider.yaml index 444984958a716..0fbcc7059067b 100644 --- a/airflow/providers/http/provider.yaml +++ b/airflow/providers/http/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-http +name: Hypertext Transfer Protocol (HTTP) description: | `Hypertext Transfer Protocol (HTTP) `__ diff --git a/airflow/providers/http/sensors/http.py b/airflow/providers/http/sensors/http.py index e7c1e02806be2..df069212e3650 100644 --- a/airflow/providers/http/sensors/http.py +++ b/airflow/providers/http/sensors/http.py @@ -45,7 +45,7 @@ def response_check(response, task_instance): .. 
seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:HttpSensor` + :ref:`apache-airflow:howto/operator:HttpSensor` :param http_conn_id: The connection to run the sensor against :type http_conn_id: str diff --git a/airflow/providers/imap/provider.yaml b/airflow/providers/imap/provider.yaml index 6f6290326a249..7616c64270d16 100644 --- a/airflow/providers/imap/provider.yaml +++ b/airflow/providers/imap/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-imap +name: Internet Message Access Protocol (IMAP) description: | `Internet Message Access Protocol (IMAP) `__ diff --git a/airflow/providers/jdbc/operators/jdbc.py b/airflow/providers/jdbc/operators/jdbc.py index ce82898c38f77..b6c43272e098b 100644 --- a/airflow/providers/jdbc/operators/jdbc.py +++ b/airflow/providers/jdbc/operators/jdbc.py @@ -28,6 +28,10 @@ class JdbcOperator(BaseOperator): Requires jaydebeapi. + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`apache-airflow:howto/operator:JdbcOperator` + :param sql: the sql code to be executed. (templated) :type sql: Can receive a str representing a sql statement, a list of str (sql statements), or reference to a template file. 
diff --git a/airflow/providers/jdbc/provider.yaml b/airflow/providers/jdbc/provider.yaml index 4cb9b1cb98654..dac49d3cc60f0 100644 --- a/airflow/providers/jdbc/provider.yaml +++ b/airflow/providers/jdbc/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-jdbc +name: Java Database Connectivity (JDBC) description: | `Java Database Connectivity (JDBC) `__ diff --git a/airflow/providers/jenkins/provider.yaml b/airflow/providers/jenkins/provider.yaml index da1c1d125c5ab..36e10f65b49f7 100644 --- a/airflow/providers/jenkins/provider.yaml +++ b/airflow/providers/jenkins/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-jenkins +name: Jenkins description: | `Jenkins `__ diff --git a/airflow/providers/jira/provider.yaml b/airflow/providers/jira/provider.yaml index 6e040cb6197a4..9018b8283aab3 100644 --- a/airflow/providers/jira/provider.yaml +++ b/airflow/providers/jira/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-jira +name: Jira description: | `Atlassian Jira `__ diff --git a/airflow/providers/microsoft/azure/provider.yaml b/airflow/providers/microsoft/azure/provider.yaml index e937f0d9dd6e0..fa2a21035cd57 100644 --- a/airflow/providers/microsoft/azure/provider.yaml +++ b/airflow/providers/microsoft/azure/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-microsoft-azure +name: Microsoft Azure description: | `Microsoft Azure `__ diff --git a/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py b/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py index ccc7577a38f89..9b22af15fdb48 100644 --- a/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py +++ b/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py @@ -31,7 +31,7 @@ class AzureBlobStorageToGCSOperator(BaseOperator): .. 
seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:AzureBlobStorageToGCSOperator` + :ref:`apache-airflow:howto/operator:AzureBlobStorageToGCSOperator` :param wasb_conn_id: Reference to the wasb connection. :type wasb_conn_id: str diff --git a/airflow/providers/microsoft/azure/transfers/local_to_adls.py b/airflow/providers/microsoft/azure/transfers/local_to_adls.py index bf6947b653de7..c4198a75c2de6 100644 --- a/airflow/providers/microsoft/azure/transfers/local_to_adls.py +++ b/airflow/providers/microsoft/azure/transfers/local_to_adls.py @@ -29,7 +29,7 @@ class LocalToAzureDataLakeStorageOperator(BaseOperator): .. seealso:: For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:LocalToAzureDataLakeStorageOperator` + :ref:`apache-airflow:howto/operator:LocalToAzureDataLakeStorageOperator` :param local_path: local path. Can be single file, directory (in which case, upload recursively) or glob pattern. 
Recursive glob patterns using `**` diff --git a/airflow/providers/microsoft/mssql/provider.yaml b/airflow/providers/microsoft/mssql/provider.yaml index 79719cc97c05d..b6930e3488b6b 100644 --- a/airflow/providers/microsoft/mssql/provider.yaml +++ b/airflow/providers/microsoft/mssql/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-microsoft-mssql +name: Microsoft SQL Server (MSSQL) description: | `Microsoft SQL Server (MSSQL) `__ diff --git a/airflow/providers/microsoft/winrm/provider.yaml b/airflow/providers/microsoft/winrm/provider.yaml index 7f84bb42daa49..f32080b2ef52d 100644 --- a/airflow/providers/microsoft/winrm/provider.yaml +++ b/airflow/providers/microsoft/winrm/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-microsoft-winrm +name: Windows Remote Management (WinRM) description: | `Windows Remote Management (WinRM) `__ diff --git a/airflow/providers/mongo/provider.yaml b/airflow/providers/mongo/provider.yaml index 7bdad89d6233e..b2e5b90667644 100644 --- a/airflow/providers/mongo/provider.yaml +++ b/airflow/providers/mongo/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-mongo +name: MongoDB description: | `MongoDB `__ diff --git a/airflow/providers/mysql/operators/mysql.py b/airflow/providers/mysql/operators/mysql.py index 85abe64a18a3f..325f2de18e62c 100644 --- a/airflow/providers/mysql/operators/mysql.py +++ b/airflow/providers/mysql/operators/mysql.py @@ -26,6 +26,10 @@ class MySqlOperator(BaseOperator): """ Executes sql code in a specific MySQL database + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`apache-airflow:howto/operator:MySqlOperator` + :param sql: the sql code to be executed. Can receive a str representing a sql statement, a list of str (sql statements), or reference to a template file. 
Template reference are recognized by str ending in '.sql' diff --git a/airflow/providers/mysql/provider.yaml b/airflow/providers/mysql/provider.yaml index a7390f7ada9f7..e6961874adbbe 100644 --- a/airflow/providers/mysql/provider.yaml +++ b/airflow/providers/mysql/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-mysql +name: MySQL description: | `MySQL `__ diff --git a/airflow/providers/odbc/hooks/odbc.py b/airflow/providers/odbc/hooks/odbc.py index fc12549422754..09b090a2d2598 100644 --- a/airflow/providers/odbc/hooks/odbc.py +++ b/airflow/providers/odbc/hooks/odbc.py @@ -29,7 +29,7 @@ class OdbcHook(DbApiHook): """ Interact with odbc data sources using pyodbc. - See :ref:`howto/connection/odbc` for full documentation. + See :ref:`apache-airflow:howto/connection/odbc` for full documentation. """ DEFAULT_SQLALCHEMY_SCHEME = 'mssql+pyodbc' diff --git a/airflow/providers/odbc/provider.yaml b/airflow/providers/odbc/provider.yaml index 26c275f9e9567..f3e24209b245f 100644 --- a/airflow/providers/odbc/provider.yaml +++ b/airflow/providers/odbc/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-odbc +name: ODBC description: | `ODBC `__ diff --git a/airflow/providers/openfaas/provider.yaml b/airflow/providers/openfaas/provider.yaml index 0731477f1220f..91c5f60c18b4c 100644 --- a/airflow/providers/openfaas/provider.yaml +++ b/airflow/providers/openfaas/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-openfaas +name: OpenFaaS description: | `OpenFaaS `__ diff --git a/airflow/providers/opsgenie/provider.yaml b/airflow/providers/opsgenie/provider.yaml index f2e0e2ae412c0..ea2caa754bfe6 100644 --- a/airflow/providers/opsgenie/provider.yaml +++ b/airflow/providers/opsgenie/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-opsgenie +name: Opsgenie description: | `Opsgenie `__ diff --git a/airflow/providers/oracle/provider.yaml b/airflow/providers/oracle/provider.yaml index 
6afd04d8cf1f3..f3695fe3bb8f0 100644 --- a/airflow/providers/oracle/provider.yaml +++ b/airflow/providers/oracle/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-oracle +name: Oracle description: | `Oracle `__ diff --git a/airflow/providers/pagerduty/provider.yaml b/airflow/providers/pagerduty/provider.yaml index 49cc86e83c9fa..7a39dc39a3aee 100644 --- a/airflow/providers/pagerduty/provider.yaml +++ b/airflow/providers/pagerduty/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-pagerduty +name: Pagerduty description: | `Pagerduty `__ diff --git a/airflow/providers/papermill/provider.yaml b/airflow/providers/papermill/provider.yaml index 81722e312b779..112c2d2acbc4a 100644 --- a/airflow/providers/papermill/provider.yaml +++ b/airflow/providers/papermill/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-papermill +name: Papermill description: | `Papermill `__ diff --git a/airflow/providers/plexus/provider.yaml b/airflow/providers/plexus/provider.yaml index eff6c991462d7..b899e32f8b618 100644 --- a/airflow/providers/plexus/provider.yaml +++ b/airflow/providers/plexus/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-plexus +name: Plexus description: | `Plexus `__ diff --git a/airflow/providers/postgres/provider.yaml b/airflow/providers/postgres/provider.yaml index 48915ea88247c..55485c4252709 100644 --- a/airflow/providers/postgres/provider.yaml +++ b/airflow/providers/postgres/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-postgres +name: PostgreSQL description: | `PostgreSQL `__ diff --git a/airflow/providers/presto/provider.yaml b/airflow/providers/presto/provider.yaml index 882dc957e524f..8e8dccc67840c 100644 --- a/airflow/providers/presto/provider.yaml +++ b/airflow/providers/presto/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-presto +name: Presto description: | `Presto `__ diff --git 
a/airflow/providers/qubole/provider.yaml b/airflow/providers/qubole/provider.yaml index 837ac36f9361a..e2279c7cb2c3c 100644 --- a/airflow/providers/qubole/provider.yaml +++ b/airflow/providers/qubole/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-qubole +name: Qubole description: | `Qubole `__ diff --git a/airflow/providers/redis/provider.yaml b/airflow/providers/redis/provider.yaml index b9ab7078b7bb4..2a6df5bbe9d4e 100644 --- a/airflow/providers/redis/provider.yaml +++ b/airflow/providers/redis/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-redis +name: Redis description: | `Redis `__ diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/salesforce/provider.yaml index ea78fa43c6177..2651c4375bc52 100644 --- a/airflow/providers/salesforce/provider.yaml +++ b/airflow/providers/salesforce/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-salesforce +name: Salesforce description: | `Salesforce `__ diff --git a/airflow/providers/samba/provider.yaml b/airflow/providers/samba/provider.yaml index 8a3a0c1db9e9b..a440f747ba77e 100644 --- a/airflow/providers/samba/provider.yaml +++ b/airflow/providers/samba/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-samba +name: Samba description: | `Samba `__ diff --git a/airflow/providers/segment/provider.yaml b/airflow/providers/segment/provider.yaml index aa0edf981685c..04e987b147ced 100644 --- a/airflow/providers/segment/provider.yaml +++ b/airflow/providers/segment/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-segment +name: Segment description: | `Segment `__ diff --git a/airflow/providers/sendgrid/provider.yaml b/airflow/providers/sendgrid/provider.yaml index 3f2f3bad768e0..a22b8c35d0b35 100644 --- a/airflow/providers/sendgrid/provider.yaml +++ b/airflow/providers/sendgrid/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-sendgrid +name: Sendgrid 
description: | `Sendgrid `__ diff --git a/airflow/providers/sendgrid/utils/emailer.py b/airflow/providers/sendgrid/utils/emailer.py index 174a3a1ac51ca..f95fd3c25aede 100644 --- a/airflow/providers/sendgrid/utils/emailer.py +++ b/airflow/providers/sendgrid/utils/emailer.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Airflow module for emailer using sendgrid""" +"""Airflow module for email backend using sendgrid""" import base64 import logging diff --git a/airflow/providers/sftp/provider.yaml b/airflow/providers/sftp/provider.yaml index 277845f7fef84..cdfa225a7bef5 100644 --- a/airflow/providers/sftp/provider.yaml +++ b/airflow/providers/sftp/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-sftp +name: SFTP description: | `SSH File Transfer Protocol (SFTP) `__ diff --git a/airflow/providers/singularity/provider.yaml b/airflow/providers/singularity/provider.yaml index 73b1c95bfdd41..02b77bba656a6 100644 --- a/airflow/providers/singularity/provider.yaml +++ b/airflow/providers/singularity/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-singularity +name: Singularity description: | `Singularity `__ diff --git a/airflow/providers/slack/provider.yaml b/airflow/providers/slack/provider.yaml index 2742d7e6333fb..adb0e0a2351c4 100644 --- a/airflow/providers/slack/provider.yaml +++ b/airflow/providers/slack/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-slack +name: Slack description: | `Slack `__ diff --git a/airflow/providers/snowflake/operators/snowflake.py b/airflow/providers/snowflake/operators/snowflake.py index e6b910c40f2fc..c8e1d73555460 100644 --- a/airflow/providers/snowflake/operators/snowflake.py +++ b/airflow/providers/snowflake/operators/snowflake.py @@ -24,7 +24,11 @@ class SnowflakeOperator(BaseOperator): """ - Executes sql code in a Snowflake database + Executes 
SQL code in a Snowflake database + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`apache-airflow:howto/operator:SnowflakeOperator` :param snowflake_conn_id: reference to specific snowflake connection id :type snowflake_conn_id: str diff --git a/airflow/providers/snowflake/provider.yaml b/airflow/providers/snowflake/provider.yaml index 7a16a85f2dba8..021a7c5644944 100644 --- a/airflow/providers/snowflake/provider.yaml +++ b/airflow/providers/snowflake/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-snowflake +name: Snowflake description: | `Snowflake `__ diff --git a/airflow/providers/snowflake/transfers/s3_to_snowflake.py b/airflow/providers/snowflake/transfers/s3_to_snowflake.py index 758bec2cb1447..8461ef7f0e63a 100644 --- a/airflow/providers/snowflake/transfers/s3_to_snowflake.py +++ b/airflow/providers/snowflake/transfers/s3_to_snowflake.py @@ -28,6 +28,10 @@ class S3ToSnowflakeOperator(BaseOperator): """ Executes an COPY command to load files from s3 to Snowflake + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`apache-airflow:howto/operator:S3ToSnowflakeOperator` + :param s3_keys: reference to a list of S3 keys :type s3_keys: list :param table: reference to a specific table in snowflake database diff --git a/airflow/providers/snowflake/transfers/snowflake_to_slack.py b/airflow/providers/snowflake/transfers/snowflake_to_slack.py index a7fdd121057c8..108934eb9589a 100644 --- a/airflow/providers/snowflake/transfers/snowflake_to_slack.py +++ b/airflow/providers/snowflake/transfers/snowflake_to_slack.py @@ -36,6 +36,10 @@ class SnowflakeToSlackOperator(BaseOperator): allow the dataframe to be rendered nicely. For example, set 'slack_message' to {{ results_df | tabulate(tablefmt="pretty", headers="keys") }} to send the results to Slack as an ascii rendered table. + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`apache-airflow:howto/operator:SnowflakeToSlackOperator` + :param sql: The SQL statement to execute on Snowflake (templated) :type sql: str :param slack_message: The templated Slack message to send with the data returned from Snowflake. diff --git a/airflow/providers/sqlite/provider.yaml b/airflow/providers/sqlite/provider.yaml index b1afc3c3ca27c..326d8f1c18204 100644 --- a/airflow/providers/sqlite/provider.yaml +++ b/airflow/providers/sqlite/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-sqlite +name: SQLite description: | `SQLite `__ diff --git a/airflow/providers/ssh/provider.yaml b/airflow/providers/ssh/provider.yaml index 7fc1804f0378c..97e553782d4d6 100644 --- a/airflow/providers/ssh/provider.yaml +++ b/airflow/providers/ssh/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-ssh +name: SSH description: | `Secure Shell (SSH) `__ diff --git a/airflow/providers/vertica/provider.yaml b/airflow/providers/vertica/provider.yaml index 516c52089eb3a..4ddb8c723b84c 100644 --- a/airflow/providers/vertica/provider.yaml +++ b/airflow/providers/vertica/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-vertica +name: Vertica description: | `Vertica `__ diff --git a/airflow/providers/yandex/operators/yandexcloud_dataproc.py b/airflow/providers/yandex/operators/yandexcloud_dataproc.py index d6338fbab74d4..0818aa191c81b 100644 --- a/airflow/providers/yandex/operators/yandexcloud_dataproc.py +++ b/airflow/providers/yandex/operators/yandexcloud_dataproc.py @@ -25,10 +25,6 @@ class DataprocCreateClusterOperator(BaseOperator): """Creates Yandex.Cloud Data Proc cluster. - .. seealso:: - For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:DataprocCreateClusterOperator` - :param folder_id: ID of the folder in which cluster should be created. 
:type folder_id: Optional[str] :param cluster_name: Cluster name. Must be unique inside the folder. diff --git a/airflow/providers/yandex/provider.yaml b/airflow/providers/yandex/provider.yaml index 51029c80bcabc..884f936f9df6b 100644 --- a/airflow/providers/yandex/provider.yaml +++ b/airflow/providers/yandex/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-yandex +name: Yandex description: | Yandex including `Yandex.Cloud `__ diff --git a/airflow/providers/zendesk/provider.yaml b/airflow/providers/zendesk/provider.yaml index db8ab80e12448..c0f9e895449d5 100644 --- a/airflow/providers/zendesk/provider.yaml +++ b/airflow/providers/zendesk/provider.yaml @@ -17,6 +17,7 @@ --- package-name: apache-airflow-providers-zendesk +name: Zendesk description: | `Zendesk `__ diff --git a/dev/README.md b/dev/README.md index aaac3002dd83a..3ae58c91f6ccf 100644 --- a/dev/README.md +++ b/dev/README.md @@ -106,7 +106,7 @@ via [Official Apache Download for providers](https://downloads.apache.org/airflo [Official Apache Download for backport-providers](https://downloads.apache.org/airflow/backport-providers/) The full provider's list can be found here: -[Provider Packages Reference](https://airflow.readthedocs.io/en/latest/provider-packages-ref.html) +[Provider Packages Reference](https://s.apache.org/airflow-docs) There are also convenience packages released as "apache-airflow-providers" and "apache-airflow-backport-providers" separately in PyPI. diff --git a/docs/README.rst b/docs/README.rst new file mode 100644 index 0000000000000..dcc1b531dfeab --- /dev/null +++ b/docs/README.rst @@ -0,0 +1,126 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Documentation +############# + +This directory contains documentation for the Apache Airflow project and other packages that are closely related to it, i.e. providers packages. Documentation is built using `Sphinx `__. + +For Helm Chart, see: `/chart/README.md <../chart/README.md>`__ + +File structure +============== + +Currently in the ``/docs/`` directory there is documentation for the ``apache-airflow`` package. The ``apache-airflow-providers-*`` directories contain independent documentation for each provider package. +We are taking steps to move the documentation for ``apache-airflow`` package to the ``apache-airflow`` subdirectory to make the directory structure clearer. + +Development documentation preview +================================== + +Documentation from the ``master`` branch is built and automatically published: `s.apache.org/airflow-docs `_ + +Documentation for your PRs is available as a downloadable artifact in GitHub Actions after the CI builds your PR. + +Building documentation +====================== + +To generate a local version you can use `<../BREEZE.rst>`_. + +The documentation build consists of verifying consistency of documentation and two steps: + +* spell checking +* building documentation + +You can only run one of the steps via ``--spellcheck-only`` or ``--docs-only``. + +.. code-block:: bash + + ./breeze build-docs + +or just to run spell-check + +..
code-block:: bash + + ./breeze build-docs -- --spellcheck-only + +or just to run documentation building + +.. code-block:: bash + + ./breeze build-docs -- --docs-only + +Troubleshooting +--------------- + +If you are creating ``example_dags`` directory, you need to create ``example_dags/__init__.py`` with Apache +license or copy another ``__init__.py`` file that contains the necessary license. + +Cross-referencing syntax +======================== + +Cross-references are generated by many semantic interpreted text roles. +Basically, you only need to write: + +.. code-block:: rst + + :role:`target` + +And a link will be +created to the item named *target* of the type indicated by *role*. The link's +text will be the same as *target*. + +You may supply an explicit title and reference target, like in reST direct +hyperlinks: + +.. code-block:: rst + + :role:`title ` + +This will refer to *target*, but the link text will be *title*. + +Here are practical examples: + +.. code-block:: rst + + :class:`airflow.models.dag.DAG` - link to Python API reference documentation + :doc:`/docs/operators` - link to other document + :ref:`handle` - link to section in current or another document + + .. _handle: + + Section title + ---------------------------------- + +Role ``:class:`` works well with references between packages. If you want to use other roles, it is a good idea to specify a package: + +.. code-block:: rst + + :doc:`apache-airflow:installation` + :ref:`apache-airflow-providers-google:write-logs-stackdriver` + +If you still feel confused then you can view more possible roles for our documentation: + +.. 
code-block:: bash + + ./list-roles.sh + +For more information, see: `Cross-referencing syntax `_ in Sphinx documentation + +Support +======= + +If you need help, write to `#documentation `__ channel on `Airflow's Slack `__ diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst new file mode 100644 index 0000000000000..0db56340d96dc --- /dev/null +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-amazon`` +=================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/amazon/index> diff --git a/docs/apache-airflow-providers-apache-cassandra/index.rst b/docs/apache-airflow-providers-apache-cassandra/index.rst new file mode 100644 index 0000000000000..d8b556ea14c7f --- /dev/null +++ b/docs/apache-airflow-providers-apache-cassandra/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-cassandra`` +============================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/cassandra/index> diff --git a/docs/apache-airflow-providers-apache-druid/index.rst b/docs/apache-airflow-providers-apache-druid/index.rst new file mode 100644 index 0000000000000..7e279b6f75c0d --- /dev/null +++ b/docs/apache-airflow-providers-apache-druid/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-druid`` +========================================= + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/druid/index> diff --git a/docs/apache-airflow-providers-apache-hdfs/index.rst b/docs/apache-airflow-providers-apache-hdfs/index.rst new file mode 100644 index 0000000000000..960cda353b546 --- /dev/null +++ b/docs/apache-airflow-providers-apache-hdfs/index.rst @@ -0,0 +1,29 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements.  See the NOTICE file + distributed with this work for additional information + regarding copyright ownership.  The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License.  You may obtain a copy of the License at + + ..   http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied.  See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-hdfs`` +======================================== + + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/hdfs/index> diff --git a/docs/apache-airflow-providers-apache-hive/index.rst b/docs/apache-airflow-providers-apache-hive/index.rst new file mode 100644 index 0000000000000..23492def6e195 --- /dev/null +++ b/docs/apache-airflow-providers-apache-hive/index.rst @@ -0,0 +1,29 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements.  See the NOTICE file + distributed with this work for additional information + regarding copyright ownership.
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-hive`` +======================================== + + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/hive/index> diff --git a/docs/apache-airflow-providers-apache-kylin/index.rst b/docs/apache-airflow-providers-apache-kylin/index.rst new file mode 100644 index 0000000000000..cbcbd520d1718 --- /dev/null +++ b/docs/apache-airflow-providers-apache-kylin/index.rst @@ -0,0 +1,29 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-kylin`` +========================================= + + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/kylin/index> diff --git a/docs/apache-airflow-providers-apache-livy/index.rst b/docs/apache-airflow-providers-apache-livy/index.rst new file mode 100644 index 0000000000000..e8d174c768b68 --- /dev/null +++ b/docs/apache-airflow-providers-apache-livy/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-livy`` +======================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/livy/index> diff --git a/docs/apache-airflow-providers-apache-pig/index.rst b/docs/apache-airflow-providers-apache-pig/index.rst new file mode 100644 index 0000000000000..4ef11e801cf72 --- /dev/null +++ b/docs/apache-airflow-providers-apache-pig/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-pig`` +======================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/pig/index> diff --git a/docs/apache-airflow-providers-apache-pinot/index.rst b/docs/apache-airflow-providers-apache-pinot/index.rst new file mode 100644 index 0000000000000..8b45f39e8e9f7 --- /dev/null +++ b/docs/apache-airflow-providers-apache-pinot/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-pinot`` +========================================= + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/pinot/index> diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst new file mode 100644 index 0000000000000..ef5dc954721c0 --- /dev/null +++ b/docs/apache-airflow-providers-apache-spark/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-spark`` +========================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/spark/index> diff --git a/docs/apache-airflow-providers-apache-sqoop/index.rst b/docs/apache-airflow-providers-apache-sqoop/index.rst new file mode 100644 index 0000000000000..77d4c46dc6759 --- /dev/null +++ b/docs/apache-airflow-providers-apache-sqoop/index.rst @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-sqoop`` +========================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/sqoop/index> diff --git a/docs/apache-airflow-providers-celery/index.rst b/docs/apache-airflow-providers-celery/index.rst new file mode 100644 index 0000000000000..3ab50b5708ff1 --- /dev/null +++ b/docs/apache-airflow-providers-celery/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-celery`` +=================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/celery/index> diff --git a/docs/apache-airflow-providers-cloudant/index.rst b/docs/apache-airflow-providers-cloudant/index.rst new file mode 100644 index 0000000000000..f8ed1895475ba --- /dev/null +++ b/docs/apache-airflow-providers-cloudant/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-cloudant`` +===================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/cloudant/index> diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst new file mode 100644 index 0000000000000..aba9809af98b6 --- /dev/null +++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-cncf-kubernetes`` +============================================ + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/cncf/kubernetes/index> diff --git a/docs/apache-airflow-providers-databricks/index.rst b/docs/apache-airflow-providers-databricks/index.rst new file mode 100644 index 0000000000000..75d360353cd2c --- /dev/null +++ b/docs/apache-airflow-providers-databricks/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-databricks`` +============================================ + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/databricks/index> diff --git a/docs/apache-airflow-providers-datadog/index.rst b/docs/apache-airflow-providers-datadog/index.rst new file mode 100644 index 0000000000000..bd044e2eee5af --- /dev/null +++ b/docs/apache-airflow-providers-datadog/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-datadog`` +==================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/datadog/index> diff --git a/docs/apache-airflow-providers-dingding/index.rst b/docs/apache-airflow-providers-dingding/index.rst new file mode 100644 index 0000000000000..1b5e733751584 --- /dev/null +++ b/docs/apache-airflow-providers-dingding/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-dingding`` +===================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/dingding/index> diff --git a/docs/apache-airflow-providers-discord/index.rst b/docs/apache-airflow-providers-discord/index.rst new file mode 100644 index 0000000000000..fd5332ac6bcb2 --- /dev/null +++ b/docs/apache-airflow-providers-discord/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-discord`` +==================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/discord/index> diff --git a/docs/apache-airflow-providers-docker/index.rst b/docs/apache-airflow-providers-docker/index.rst new file mode 100644 index 0000000000000..0202abb5aebeb --- /dev/null +++ b/docs/apache-airflow-providers-docker/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-docker`` +=================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/docker/index> diff --git a/docs/apache-airflow-providers-elasticsearch/index.rst b/docs/apache-airflow-providers-elasticsearch/index.rst new file mode 100644 index 0000000000000..6ca4ce81f36ad --- /dev/null +++ b/docs/apache-airflow-providers-elasticsearch/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-elasticsearch`` +========================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/elasticsearch/index> diff --git a/docs/apache-airflow-providers-exasol/index.rst b/docs/apache-airflow-providers-exasol/index.rst new file mode 100644 index 0000000000000..3958d825c7eab --- /dev/null +++ b/docs/apache-airflow-providers-exasol/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-exasol`` +=================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/exasol/index> diff --git a/docs/apache-airflow-providers-facebook/index.rst b/docs/apache-airflow-providers-facebook/index.rst new file mode 100644 index 0000000000000..e87b7531d8cfe --- /dev/null +++ b/docs/apache-airflow-providers-facebook/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-facebook`` +===================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/facebook/index> diff --git a/docs/apache-airflow-providers-ftp/index.rst b/docs/apache-airflow-providers-ftp/index.rst new file mode 100644 index 0000000000000..07e0d52ddefc3 --- /dev/null +++ b/docs/apache-airflow-providers-ftp/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-ftp`` +================================ + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/ftp/index> diff --git a/docs/apache-airflow-providers-google/api-auth-backend/google-openid.rst b/docs/apache-airflow-providers-google/api-auth-backend/google-openid.rst new file mode 100644 index 0000000000000..f1046cc99b455 --- /dev/null +++ b/docs/apache-airflow-providers-google/api-auth-backend/google-openid.rst @@ -0,0 +1,69 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Google OpenID authentication +'''''''''''''''''''''''''''' + +You can also configure +`Google OpenID `__ +for authentication. To enable it, set the following option in the configuration: + +.. 
code-block:: ini + + [api] + auth_backend = airflow.providers.google.common.auth_backend.google_openid + +It is also highly recommended to configure an OAuth2 audience so that the generated tokens are restricted to +use by Airflow only. + +.. code-block:: ini + + [api] + google_oauth2_audience = project-id-random-value.apps.googleusercontent.com + +You can also configure the CLI to send requests to a remote API instead of making a query to a local database. + +.. code-block:: ini + + [cli] + api_client = airflow.api.client.json_client + endpoint_url = http://remote-host.example.org/ + +You can also set up a service account key. If omitted, authorization based on `the Application Default +Credentials `__ +will be used. + +.. code-block:: ini + + [cli] + google_key_path = + +You can get the authorization token with the ``gcloud auth print-identity-token`` command. An example request +looks like the following. + + .. code-block:: bash + + ENDPOINT_URL="http://localhost:8080/" + + AUDIENCE="project-id-random-value.apps.googleusercontent.com" + ID_TOKEN="$(gcloud auth print-identity-token "--audience=${AUDIENCE}")" + + curl -X GET \ + "${ENDPOINT_URL}/api/experimental/pools" \ + -H 'Content-Type: application/json' \ + -H 'Cache-Control: no-cache' \ + -H "Authorization: Bearer ${ID_TOKEN}" diff --git a/docs/apache-airflow-providers-google/configurations-ref.rst b/docs/apache-airflow-providers-google/configurations-ref.rst new file mode 100644 index 0000000000000..a64c1178da6a6 --- /dev/null +++ b/docs/apache-airflow-providers-google/configurations-ref.rst @@ -0,0 +1,71 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Configuration Reference +======================= + +This page contains the list of all the provider configurations that you +can set in ``airflow.cfg`` file or using environment variables. + +.. note:: + For more information on setting the configuration, see :doc:`howto/set-config` + +.. contents:: Sections: + :local: + :depth: 1 + +.. jinja:: config_ctx + + {% for section in configs %} + + [{{ section["name"] }}] + {{ "=" * (section["name"]|length + 2) }} + + {% if section["description"] %} + {{ section["description"] }} + {% endif %} + + {% for option in section["options"] %} + + .. _config:{{ section["name"] }}__{{ option["name"] }}: + + {{ option["name"] }} + {{ "-" * option["name"]|length }} + + {% if option["version_added"] %} + .. versionadded:: {{ option["version_added"] }} + {% endif %} + + {% if option["description"] %} + {{ option["description"] }} + {% endif %} + + {% if option.get("see_also") %} + .. 
seealso:: {{ option["see_also"] }} + {% endif %} + + :Type: {{ option["type"] }} + :Default: ``{{ "''" if option["default"] == "" else option["default"] }}`` + :Environment Variable: ``AIRFLOW__{{ section["name"] | upper }}__{{ option["name"] | upper }}`` + {% if option["example"] %} + :Example: + ``{{ option["example"] }}`` + {% endif %} + + {% endfor %} + {% endfor %} diff --git a/docs/howto/connection/gcp.rst b/docs/apache-airflow-providers-google/connections/gcp.rst similarity index 99% rename from docs/howto/connection/gcp.rst rename to docs/apache-airflow-providers-google/connections/gcp.rst index 6ef15c134792c..b37e251e69c3a 100644 --- a/docs/howto/connection/gcp.rst +++ b/docs/apache-airflow-providers-google/connections/gcp.rst @@ -22,8 +22,7 @@ Google Cloud Connection ================================ -The Google Cloud connection type enables the :ref:`Google Cloud Integrations -`. +The Google Cloud connection type enables the Google Cloud Integrations. Authenticating to Google Cloud ------------------------------ diff --git a/docs/howto/connection/gcp_sql.rst b/docs/apache-airflow-providers-google/connections/gcp_sql.rst similarity index 100% rename from docs/howto/connection/gcp_sql.rst rename to docs/apache-airflow-providers-google/connections/gcp_sql.rst diff --git a/docs/howto/connection/gcp_ssh.rst b/docs/apache-airflow-providers-google/connections/gcp_ssh.rst similarity index 97% rename from docs/howto/connection/gcp_ssh.rst rename to docs/apache-airflow-providers-google/connections/gcp_ssh.rst index 4f130ce9deeae..a9fb7552e8bf1 100644 --- a/docs/howto/connection/gcp_ssh.rst +++ b/docs/apache-airflow-providers-google/connections/gcp_ssh.rst @@ -27,7 +27,7 @@ file from/to the remote server using :class:`~airflow.providers.sftp.operators.s Configuring the Connection -------------------------- -For authorization to Google Cloud services, this connection should contain a configuration identical to the :doc:`/howto/connection/gcp`. 
+For authorization to Google Cloud services, this connection should contain a configuration identical to the :doc:`/connections/gcp`. All parameters for a Google Cloud connection are also valid configuration parameters for this connection. In addition, additional connection parameters to the instance are supported. It is also possible to pass them diff --git a/docs/apache-airflow-providers-google/connections/index.rst b/docs/apache-airflow-providers-google/connections/index.rst new file mode 100644 index 0000000000000..2b8afb41df936 --- /dev/null +++ b/docs/apache-airflow-providers-google/connections/index.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Connection Types +---------------- + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/docs/apache-airflow-providers-google/index.rst b/docs/apache-airflow-providers-google/index.rst new file mode 100644 index 0000000000000..edb18999535ee --- /dev/null +++ b/docs/apache-airflow-providers-google/index.rst @@ -0,0 +1,39 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-google`` +=================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: Guides + + Connection types + Logging handlers + Secrets backends + API Authentication backend + Operators + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/google/index> + Configuration diff --git a/docs/apache-airflow-providers-google/logging/gcs.rst b/docs/apache-airflow-providers-google/logging/gcs.rst new file mode 100644 index 0000000000000..4e6083a3b25a0 --- /dev/null +++ b/docs/apache-airflow-providers-google/logging/gcs.rst @@ -0,0 +1,57 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ +.. _write-logs-gcp: + +Writing Logs to Google Cloud Storage +------------------------------------ + +Remote logging to Google Cloud Storage uses an existing Airflow connection to read or write logs. If you +don't have a connection properly set up, this process will fail. + +Follow the steps below to enable Google Cloud Storage logging. + +To enable this feature, ``airflow.cfg`` must be configured as in this +example: + +.. code-block:: ini + + [logging] + # Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. + # Users must supply an Airflow connection id that provides access to the storage + # location. If remote_logging is set to true, see UPDATING.md for additional + # configuration requirements. + remote_logging = True + remote_base_log_folder = gs://my-bucket/path/to/logs + +#. By default Application Default Credentials are used to obtain credentials. You can also + set ``google_key_path`` option in ``[logging]`` section, if you want to use your own service account. +#. Make sure a Google Cloud account has read and write access to the Google Cloud Storage bucket defined above in ``remote_base_log_folder``. +#. Install the ``google`` package, like so: ``pip install 'apache-airflow[google]'``. +#. Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution. +#. Verify that logs are showing up for newly executed tasks in the bucket you have defined. +#. Verify that the Google Cloud Storage viewer is working in the UI. Pull up a newly executed task, and verify that you see something like: + +.. code-block:: none + + *** Reading remote log from gs:///example_bash_operator/run_this_last/2017-10-03T00:00:00/16.log. 
+ [2017-10-03 21:57:50,056] {cli.py:377} INFO - Running on host chrisr-00532 + [2017-10-03 21:57:50,093] {base_task_runner.py:115} INFO - Running: ['bash', '-c', 'airflow tasks run example_bash_operator run_this_last 2017-10-03T00:00:00 --job-id 47 --raw -S DAGS_FOLDER/example_dags/example_bash_operator.py'] + [2017-10-03 21:57:51,264] {base_task_runner.py:98} INFO - Subtask: [2017-10-03 21:57:51,263] {__init__.py:45} INFO - Using executor SequentialExecutor + [2017-10-03 21:57:51,306] {base_task_runner.py:98} INFO - Subtask: [2017-10-03 21:57:51,306] {models.py:186} INFO - Filling up the DagBag from /airflow/dags/example_dags/example_bash_operator.py + +**Note** that the path to the remote log file is listed on the first line. diff --git a/docs/apache-airflow-providers-google/logging/index.rst b/docs/apache-airflow-providers-google/logging/index.rst new file mode 100644 index 0000000000000..2b8afb41df936 --- /dev/null +++ b/docs/apache-airflow-providers-google/logging/index.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Connection Types +---------------- + +.. 
toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/docs/apache-airflow-providers-google/logging/stackdriver.rst b/docs/apache-airflow-providers-google/logging/stackdriver.rst new file mode 100644 index 0000000000000..414c9e62c6bee --- /dev/null +++ b/docs/apache-airflow-providers-google/logging/stackdriver.rst @@ -0,0 +1,63 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. _write-logs-stackdriver: + +Writing Logs to Google Stackdriver +---------------------------------- + +Airflow can be configured to read and write task logs in `Google Stackdriver Logging `__. + +To enable this feature, ``airflow.cfg`` must be configured as in this +example: + +.. code-block:: ini + + [logging] + # Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. + # Users must supply an Airflow connection id that provides access to the storage + # location. If remote_logging is set to true, see UPDATING.md for additional + # configuration requirements. + remote_logging = True + remote_base_log_folder = stackdriver://logs-name + +All configuration options are in the ``[logging]`` section. + +The value of field ``remote_logging`` must always be set to ``True`` for this feature to work. 
+Turning this option off will result in data not being sent to Stackdriver. +The ``remote_base_log_folder`` option contains the URL that specifies the type of handler to be used. +For integration with Stackdriver, this option should start with ``stackdriver:///``. +The path section of the URL specifies the name of the log e.g. ``stackdriver://airflow-tasks`` writes +logs under the name ``airflow-tasks``. + +You can set ``google_key_path`` option in the ``[logging]`` section to specify the path to `the service +account key file `__. +If omitted, authorization based on `the Application Default Credentials +`__ will +be used. + +By using the ``logging_config_class`` option you can get :ref:`advanced features ` of +this handler. Details are available in the handler's documentation - +:class:`~airflow.providers.google.cloud.log.stackdriver_task_handler.StackdriverTaskHandler`. + + +.. _log-link-stackdriver: + +Google Stackdriver External Link +'''''''''''''''''''''''''''''''' + +Airflow automatically shows a link to Google Stackdriver when configured to use it as the remote logging system. diff --git a/docs/howto/operator/google/_partials/prerequisite_tasks.rst b/docs/apache-airflow-providers-google/operators/_partials/prerequisite_tasks.rst similarity index 90% rename from docs/howto/operator/google/_partials/prerequisite_tasks.rst rename to docs/apache-airflow-providers-google/operators/_partials/prerequisite_tasks.rst index ae4bb9eb51197..a509ecd19141d 100644 --- a/docs/howto/operator/google/_partials/prerequisite_tasks.rst +++ b/docs/apache-airflow-providers-google/operators/_partials/prerequisite_tasks.rst @@ -28,6 +28,6 @@ To use these operators, you must do a few things: pip install 'apache-airflow[google]' - Detailed information is available for :doc:`/installation`. + Detailed information is available for :doc:`Installation `. - * :doc:`Setup a Google Cloud Connection `. + * :doc:`Setup a Google Cloud Connection `. 
diff --git a/docs/howto/operator/google/ads.rst b/docs/apache-airflow-providers-google/operators/ads.rst similarity index 91% rename from docs/howto/operator/google/ads.rst rename to docs/apache-airflow-providers-google/operators/ads.rst index 70cdb638c223c..e9ea26f958e67 100644 --- a/docs/howto/operator/google/ads.rst +++ b/docs/apache-airflow-providers-google/operators/ads.rst @@ -27,7 +27,7 @@ businesses to advertise on Google Search, YouTube and other sites across the web Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:GoogleAdsToGcsOperator: @@ -37,7 +37,7 @@ Google Ads to GCS To query the Google Ads API and generate a CSV report of the results use :class:`~airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator`. -.. exampleinclude:: /../airflow/providers/google/ads/example_dags/example_ads.py +.. exampleinclude:: /../../airflow/providers/google/ads/example_dags/example_ads.py :language: python :dedent: 4 :start-after: [START howto_google_ads_to_gcs_operator] @@ -56,7 +56,7 @@ Upload Google Ads Accounts to GCS To upload Google Ads accounts to Google Cloud Storage bucket use the :class:`~airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsListAccountsOperator`. -.. exampleinclude:: /../airflow/providers/google/ads/example_dags/example_ads.py +.. 
exampleinclude:: /../../airflow/providers/google/ads/example_dags/example_ads.py :language: python :dedent: 4 :start-after: [START howto_ads_list_accounts_operator] diff --git a/docs/howto/operator/google/cloud/automl.rst b/docs/apache-airflow-providers-google/operators/cloud/automl.rst similarity index 83% rename from docs/howto/operator/google/cloud/automl.rst rename to docs/apache-airflow-providers-google/operators/cloud/automl.rst index 2c55fb22efe22..e3ab6445058b0 100644 --- a/docs/howto/operator/google/cloud/automl.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/automl.rst @@ -31,7 +31,7 @@ and then integrate those models into your applications and web sites. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudAutoMLDocuments: @@ -48,7 +48,7 @@ To create a Google AutoML dataset you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLCreateDatasetOperator`. The operator returns dataset id in :ref:`XCom ` under ``dataset_id`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_create_dataset] @@ -57,7 +57,7 @@ The operator returns dataset id in :ref:`XCom ` under ``dataset_i After creating a dataset you can use it to import some data using :class:`~airflow.providers.google.cloud.operators.automl.AutoMLImportDataOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_import_data] @@ -66,7 +66,7 @@ After creating a dataset you can use it to import some data using To update dataset you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLTablesUpdateDatasetOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_update_dataset] @@ -81,7 +81,7 @@ Listing Table And Columns Specs To list table specs you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLTablesListTableSpecsOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_specs] @@ -90,7 +90,7 @@ To list table specs you can use To list column specs you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLTablesListColumnSpecsOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_column_specs] @@ -109,7 +109,7 @@ To create a Google AutoML model you can use The operator will wait for the operation to complete. Additionally the operator returns the id of model in :ref:`XCom ` under ``model_id`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_create_model] @@ -118,7 +118,7 @@ returns the id of model in :ref:`XCom ` under ``model_id`` key. To get existing model one can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLGetModelOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_get_model] @@ -127,7 +127,7 @@ To get existing model one can use Once a model is created it could be deployed using :class:`~airflow.providers.google.cloud.operators.automl.AutoMLDeployModelOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_deploy_model] @@ -136,7 +136,7 @@ Once a model is created it could be deployed using If you wish to delete a model you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLDeleteModelOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_delete_model] @@ -153,13 +153,13 @@ To obtain predictions from Google Cloud AutoML model you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLBatchPredictOperator`. In the first case the model must be deployed. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_prediction] :end-before: [END howto_operator_prediction] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_prediction] @@ -175,7 +175,7 @@ You can get a list of AutoML models using :class:`~airflow.providers.google.cloud.operators.automl.AutoMLListDatasetOperator`. The operator returns list of datasets ids in :ref:`XCom ` under ``dataset_id_list`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_list_dataset] @@ -184,7 +184,7 @@ of datasets ids in :ref:`XCom ` under ``dataset_id_list`` key. To delete a model you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLDeleteDatasetOperator`. The delete operator also allows passing a list or comma separated string of dataset ids to be deleted. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_automl_tables.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_automl_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_dataset] diff --git a/docs/howto/operator/google/cloud/bigquery.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst similarity index 84% rename from docs/howto/operator/google/cloud/bigquery.rst rename to docs/apache-airflow-providers-google/operators/cloud/bigquery.rst index 4d04d6c27e94d..e471842197643 100644 --- a/docs/howto/operator/google/cloud/bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst @@ -33,7 +33,7 @@ data. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst Manage datasets ^^^^^^^^^^^^^^^ @@ -46,7 +46,7 @@ Create dataset To create an empty dataset in a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_dataset] @@ -62,7 +62,7 @@ To get the details of an existing dataset you can use This operator returns a `Dataset Resource `__. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset] @@ -76,7 +76,7 @@ List tables in dataset To retrieve the list of tables in a given dataset use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator`. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset_tables] @@ -93,7 +93,7 @@ To patch a dataset in BigQuery you can use Note, this operator only replaces fields that are provided in the submitted dataset resource. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_patch_dataset] @@ -110,7 +110,7 @@ To update a dataset in BigQuery you can use The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_dataset] @@ -124,7 +124,7 @@ Delete dataset To delete an existing dataset from a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_dataset] @@ -147,7 +147,7 @@ ways. You may either directly pass the schema fields in, or you may point the operator to a Google Cloud Storage object name. The object in Google Cloud Storage must be a JSON file with the schema fields in it. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table] @@ -155,7 +155,7 @@ Storage must be a JSON file with the schema fields in it. You can use this operator to create a view on top of an existing table. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_view] @@ -175,7 +175,7 @@ Similarly to you may either directly pass the schema fields in, or you may point the operator to a Google Cloud Storage object name. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_external_table] @@ -196,7 +196,7 @@ returned list will be equal to the number of rows fetched. Each element in the list will again be a list where elements would represent the column values for that row. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_get_data] @@ -213,7 +213,7 @@ To upsert a table you can use This operator either updates the existing table or creates a new, empty table in the given dataset. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_upsert_table] @@ -227,7 +227,7 @@ Delete table To delete an existing table you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_table] @@ -235,7 +235,7 @@ To delete an existing table you can use You can also use this operator to delete a view. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_view] @@ -248,7 +248,7 @@ Execute BigQuery jobs Let's say you would like to execute the following query. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py :language: python :dedent: 0 :start-after: [START howto_operator_bigquery_query] @@ -258,7 +258,7 @@ To execute the SQL query in a specific BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryInsertJobOperator` with proper query job configuration that can be Jinja templated. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_insert_job] @@ -270,7 +270,7 @@ For more information on types of BigQuery job please check If you want to include some files in your configuration you can use ``include`` clause of Jinja template language as follow: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_select_job] @@ -299,7 +299,7 @@ This operator expects a sql query that will return a single row. Each value on that first row is evaluated using python ``bool`` casting. If any of the values return ``False`` the check is failed and errors out. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_check] @@ -317,7 +317,7 @@ This operator expects a sql query that will return a single row. Each value on that first row is evaluated against ``pass_value`` which can be either a string or numeric value. If numeric, you can also specify ``tolerance``. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_value_check] @@ -332,7 +332,7 @@ To check that the values of metrics given as SQL expressions are within a certai tolerance of the ones from ``days_back`` before you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator`. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_interval_check] @@ -350,7 +350,7 @@ use the ``{{ ds_nodash }}`` macro as the table name suffix. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table] @@ -362,7 +362,7 @@ Check that a Table Partition exists To check that a table exists and has a partition you can use. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition] diff --git a/docs/howto/operator/google/cloud/bigquery_dts.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst similarity index 91% rename from docs/howto/operator/google/cloud/bigquery_dts.rst rename to docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst index e75593f7d19dc..a1a28f5ce02b5 100644 --- a/docs/howto/operator/google/cloud/bigquery_dts.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst @@ -33,7 +33,7 @@ gain access to data connectors that allow you to easily transfer data from Terad Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. 
_howto/operator:BigQueryDTSDocuments: @@ -53,7 +53,7 @@ for example :class:`~airflow.providers.google.cloud.operators.bigquery_dts.BigQu scheduling option is present in passed configuration. If present then nothing is done, otherwise it's value is set to ``True``. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py :language: python :start-after: [START howto_bigquery_dts_create_args] :end-before: [END howto_bigquery_dts_create_args] @@ -61,7 +61,7 @@ set to ``True``. You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Basic usage of the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_create_data_transfer] @@ -84,7 +84,7 @@ To delete DTS transfer configuration you can use Basic usage of the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_delete_data_transfer] @@ -105,7 +105,7 @@ Start manual transfer runs to be executed now with schedule_time equal to curren Basic usage of the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_start_transfer] @@ -118,7 +118,7 @@ parameters which allows you to dynamically determine values. 
To check if operation succeeded you can use :class:`~airflow.providers.google.cloud.sensors.bigquery_dts.BigQueryDataTransferServiceTransferRunSensor`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_dts_sensor] diff --git a/docs/howto/operator/google/cloud/bigtable.rst b/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst similarity index 90% rename from docs/howto/operator/google/cloud/bigtable.rst rename to docs/apache-airflow-providers-google/operators/cloud/bigtable.rst index ce1b1a26b03ba..76528af1f3792 100644 --- a/docs/howto/operator/google/cloud/bigtable.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst @@ -27,7 +27,7 @@ Google Cloud Bigtable Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:BigtableCreateInstanceOperator: @@ -47,7 +47,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigtable.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_create] @@ -70,7 +70,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigtable.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_update] @@ -90,7 +90,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigtable.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_delete] @@ -110,7 +110,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigtable.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_cluster_update] @@ -134,7 +134,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigtable.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_create] @@ -162,7 +162,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigtable.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_delete] @@ -187,7 +187,7 @@ timeout hits and does not raise any exception. Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_bigtable.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_wait_for_replication] diff --git a/docs/howto/operator/google/cloud/cloud_build.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst similarity index 84% rename from docs/howto/operator/google/cloud/cloud_build.rst rename to docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst index bdef35a60b020..103398e795ffd 100644 --- a/docs/howto/operator/google/cloud/cloud_build.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst @@ -33,7 +33,7 @@ artifacts such as Docker containers or Java archives. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudBuildBuild: @@ -43,7 +43,7 @@ Build configuration overview In order to trigger a build, it is necessary to pass the build configuration. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_build.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_gcp_create_build_from_storage_body] @@ -51,7 +51,7 @@ In order to trigger a build, it is necessary to pass the build configuration. The source code for the build can come from `Google Cloud Build Storage `__: -.. literalinclude:: /../tests/providers/google/cloud/operators/test_cloud_build.py +.. 
literalinclude:: /../../tests/providers/google/cloud/operators/test_cloud_build.py :language: python :dedent: 12 :start-after: [START howto_operator_gcp_cloud_build_source_gcs_dict] @@ -59,7 +59,7 @@ The source code for the build can come from `Google Cloud Build Storage `__. -.. literalinclude:: /../tests/providers/google/cloud/operators/test_cloud_build.py +.. literalinclude:: /../../tests/providers/google/cloud/operators/test_cloud_build.py :language: python :dedent: 12 :start-after: [START howto_operator_gcp_cloud_build_source_repo_dict] @@ -75,7 +75,7 @@ In addition, a build can refer to source stored in `Google Cloud Source Reposito It is also possible to specify it using the URL: -.. literalinclude:: /../tests/providers/google/cloud/operators/test_cloud_build.py +.. literalinclude:: /../../tests/providers/google/cloud/operators/test_cloud_build.py :language: python :dedent: 12 :start-after: [START howto_operator_gcp_cloud_build_source_repo_url] @@ -83,7 +83,7 @@ It is also possible to specify it using the URL: It is also possible to specify it using a YAML or JSON format. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_build.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_gcp_create_build_from_yaml_body] @@ -100,7 +100,7 @@ Trigger a build Trigger a build is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_build.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage] @@ -111,7 +111,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. 
The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_build.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage_result] diff --git a/docs/howto/operator/google/cloud/cloud_memorystore.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst similarity index 88% rename from docs/howto/operator/google/cloud/cloud_memorystore.rst rename to docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst index 67a341e3c612d..f84de92c3b799 100644 --- a/docs/howto/operator/google/cloud/cloud_memorystore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst @@ -32,7 +32,7 @@ of managing complex Redis deployments. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudMemorystoreInstance: @@ -45,7 +45,7 @@ presented as a compatible dictionary also. Here is an example of instance -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :start-after: [START howto_operator_instance] :end-before: [END howto_operator_instance] @@ -63,7 +63,7 @@ make a use of the service account listed under ``persistenceIamIdentity``. You can use :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator` operator to set permissions. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_set_acl_permission] @@ -80,7 +80,7 @@ Create instance Create a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance] @@ -91,7 +91,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance_result] @@ -106,7 +106,7 @@ Delete instance Delete a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreDeleteInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_instance] @@ -124,7 +124,7 @@ Export instance Delete a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreExportInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_export_instance] @@ -142,7 +142,7 @@ Failover instance Delete a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreFailoverInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_failover_instance] @@ -160,7 +160,7 @@ Get instance Delete a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreGetInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_get_instance] @@ -178,7 +178,7 @@ Import instance Delete a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreImportOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_import_instance] @@ -196,7 +196,7 @@ List instances List a instances is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances] @@ -207,7 +207,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances_result] @@ -221,7 +221,7 @@ Update instance Update a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_update_instance] @@ -240,7 +240,7 @@ Scale instance Scale a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_scale_instance] diff --git a/docs/howto/operator/google/cloud/cloud_memorystore_memcached.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst similarity index 86% rename from docs/howto/operator/google/cloud/cloud_memorystore_memcached.rst rename to docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst index 6483c75980002..c319359b17c5f 100644 --- a/docs/howto/operator/google/cloud/cloud_memorystore_memcached.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst @@ -32,7 +32,7 @@ Memcached deployments. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudMemorystoreMemcachedInstance: @@ -45,7 +45,7 @@ The object can be presented as a compatible dictionary also. Here is an example of instance -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :start-after: [START howto_operator_memcached_instance] :end-before: [END howto_operator_memcached_instance] @@ -60,7 +60,7 @@ Create a instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedCreateInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance_memcached] @@ -76,7 +76,7 @@ Delete an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedDeleteInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_instance_memcached] @@ -92,7 +92,7 @@ Get an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedGetInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_get_instance_memcached] @@ -108,7 +108,7 @@ List instances is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedListInstancesOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances_memcached] @@ -124,7 +124,7 @@ Updating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateInstanceOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_update_instance_memcached] @@ -142,7 +142,7 @@ and :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedApplyParametersOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py :language: python :dedent: 4 :start-after: [START howto_operator_update_and_apply_parameters_memcached] diff --git a/docs/howto/operator/google/cloud/cloud_sql.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst similarity index 86% rename from docs/howto/operator/google/cloud/cloud_sql.rst rename to docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst index d9508555dc6b8..e9f8874d4f22d 100644 --- a/docs/howto/operator/google/cloud/cloud_sql.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst @@ -27,7 +27,7 @@ Google Cloud SQL Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudSQLCreateInstanceDatabaseOperator: @@ -46,7 +46,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_create] @@ -54,7 +54,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_create_body] :end-before: [END howto_operator_cloudsql_db_create_body] @@ -62,7 +62,7 @@ Example request body: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_create_template_fields] @@ -91,7 +91,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_delete] @@ -100,7 +100,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_delete_template_fields] @@ -131,7 +131,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_patch] @@ -139,7 +139,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_patch_body] :end-before: [END howto_operator_cloudsql_db_patch_body] @@ -147,7 +147,7 @@ Example request body: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_patch_template_fields] @@ -178,7 +178,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_delete] @@ -187,7 +187,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Note: If the instance has read or failover replicas you need to delete them before you delete the primary instance. Replicas are deleted the same way as primary instances: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_replicas_delete] @@ -196,7 +196,7 @@ Replicas are deleted the same way as primary instances: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_delete_template_fields] @@ -228,7 +228,7 @@ Arguments Example body defining the export operation: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_export_body] :end-before: [END howto_operator_cloudsql_export_body] @@ -239,7 +239,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export] @@ -248,7 +248,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_export_template_fields] @@ -273,7 +273,7 @@ To grant the service account with the appropriate WRITE permissions for the GCS you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export_gcs_permissions] @@ -313,7 +313,7 @@ Arguments Example body defining the import operation: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_import_body] :end-before: [END howto_operator_cloudsql_import_body] @@ -324,7 +324,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import] @@ -333,7 +333,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_import_template_fields] @@ -358,7 +358,7 @@ To grant the service account with the appropriate READ permissions for the GCS o you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import_gcs_permissions] @@ -384,14 +384,14 @@ Arguments Example body defining the instance with failover replica: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_create_body] :end-before: [END howto_operator_cloudsql_create_body] Example body defining read replica for the instance above: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_create_replica] :end-before: [END howto_operator_cloudsql_create_replica] @@ -405,7 +405,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_create] @@ -414,7 +414,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_create_template_fields] @@ -445,7 +445,7 @@ Arguments Example body defining the instance: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_patch_body] :end-before: [END howto_operator_cloudsql_patch_body] @@ -456,7 +456,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_patch] @@ -465,7 +465,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_patch_template_fields] @@ -491,13 +491,13 @@ starting from public IP plain connection through public IP with SSL or both TCP socket connection via Cloud SQL Proxy. The proxy is downloaded and started/stopped dynamically as needed by the operator. -There is a *gcpcloudsql://* connection type that you should use to define what +There is a ``gcpcloudsql://`` connection type that you should use to define what kind of connectivity you want the operator to use. The connection is a "meta" type of connection. It is not used to make an actual connectivity on its own, but it determines whether Cloud SQL Proxy should be started by ``CloudSQLDatabaseHook`` and what kind of database connection (Postgres or MySQL) should be created dynamically to connect to Cloud SQL via public IP address or via the proxy. 
-The 'CloudSqlDatabaseHook` uses +The ``CloudSQLDatabaseHook`` uses :class:`~airflow.providers.google.cloud.hooks.cloud_sql.CloudSqlProxyRunner` to manage Cloud SQL Proxy lifecycle (each task has its own Cloud SQL Proxy) @@ -530,7 +530,7 @@ NFS-like volumes in the same path for all the workers. Example connection definitions for all connectivity cases. Note that all the components of the connection URI should be URL-encoded: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_connections] :end-before: [END howto_operator_cloudsql_query_connections] @@ -542,7 +542,7 @@ Example operators below are using all connectivity options. Note connection id from the operator matches the :envvar:`AIRFLOW_CONN_{CONN_ID}` postfix uppercase. This is standard AIRFLOW notation for defining connection via environment variables): -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_operators] :end-before: [END howto_operator_cloudsql_query_operators] @@ -550,7 +550,7 @@ standard AIRFLOW notation for defining connection via environment variables): Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_sql.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_query_template_fields] diff --git a/docs/howto/operator/google/cloud/cloud_storage_transfer_service.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst similarity index 81% rename from docs/howto/operator/google/cloud/cloud_storage_transfer_service.rst rename to docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst index c33fc01cd6d16..71ed070ba87c0 100644 --- a/docs/howto/operator/google/cloud/cloud_storage_transfer_service.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst @@ -27,7 +27,7 @@ Google Cloud Transfer Service Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudDataTransferServiceCreateJobOperator: @@ -52,7 +52,7 @@ The function accepts time in two formats: - as an :class:`~datetime.time` object -If you want to create a job transfer that copies data from AWS S3 then you must have a connection configured. Information about configuration for AWS is available: :doc:`/howto/connection/aws` +If you want to create a job transfer that copies data from AWS S3 then you must have a connection configured. Information about configuration for AWS is available: :doc:`apache-airflow:howto/connection/aws` The selected connection for AWS can be indicated by the parameter ``aws_conn_id``. For parameter definition, take a look at @@ -62,17 +62,17 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py :language: python :start-after: [START howto_operator_gcp_transfer_create_job_body_gcp] :end-before: [END howto_operator_gcp_transfer_create_job_body_gcp] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :start-after: [START howto_operator_gcp_transfer_create_job_body_aws] :end-before: [END howto_operator_gcp_transfer_create_job_body_aws] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_create_job] @@ -81,7 +81,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_create_template_fields] @@ -107,7 +107,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_delete_job] @@ -116,7 +116,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_delete_template_fields] @@ -142,12 +142,12 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py :language: python :start-after: [START howto_operator_gcp_transfer_update_job_body] :end-before: [END howto_operator_gcp_transfer_update_job_body] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_update_job] @@ -156,7 +156,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_update_template_fields] @@ -181,7 +181,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_cancel_operation] @@ -190,7 +190,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_cancel_template_fields] @@ -217,7 +217,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_get_operation] @@ -226,7 +226,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_get_template_fields] @@ -252,7 +252,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_list_operations] @@ -261,7 +261,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operations_list_template_fields] @@ -286,7 +286,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_pause_operation] @@ -295,7 +295,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_pause_template_fields] @@ -320,7 +320,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_resume_operation] @@ -329,7 +329,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_resume_template_fields] @@ -355,7 +355,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_wait_operation] @@ -364,7 +364,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_sensor_template_fields] diff --git a/docs/howto/operator/google/cloud/compute.rst b/docs/apache-airflow-providers-google/operators/cloud/compute.rst similarity index 84% rename from docs/howto/operator/google/cloud/compute.rst rename to docs/apache-airflow-providers-google/operators/cloud/compute.rst index d318f26d35636..6e7ecfebe550b 100644 --- a/docs/howto/operator/google/cloud/compute.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/compute.rst @@ -27,7 +27,7 @@ Google Compute Engine Operators Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:ComputeEngineStartInstanceOperator: @@ -43,7 +43,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_start] @@ -52,7 +52,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection id used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_start_no_project_id] @@ -62,7 +62,7 @@ from the Google Cloud connection id used: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/compute.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_start_template_fields] @@ -89,7 +89,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_stop] @@ -98,7 +98,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_stop_no_project_id] @@ -107,7 +107,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_stop_template_fields] @@ -139,7 +139,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_set_machine_type] @@ -148,7 +148,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_set_machine_type_no_project_id] @@ -157,7 +157,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_set_machine_type_template_fields] @@ -185,12 +185,12 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_igm.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_igm.py :language: python :start-after: [START howto_operator_compute_template_copy_args] :end-before: [END howto_operator_compute_template_copy_args] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_igm.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_copy_template] @@ -199,7 +199,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_igm.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_copy_template_no_project_id] @@ -208,7 +208,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/compute.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_template_copy_operator_template_fields] @@ -239,12 +239,12 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_igm.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_igm.py :language: python :start-after: [START howto_operator_compute_igm_update_template_args] :end-before: [END howto_operator_compute_igm_update_template_args] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_igm.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_update_template] @@ -253,7 +253,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_igm.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_update_template_no_project_id] @@ -263,7 +263,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/compute.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_igm_update_template_operator_template_fields] diff --git a/docs/howto/operator/google/cloud/compute_ssh.rst b/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst similarity index 89% rename from docs/howto/operator/google/cloud/compute_ssh.rst rename to docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst index bce974e1a8860..a21627844f9fa 100644 --- a/docs/howto/operator/google/cloud/compute_ssh.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst @@ -18,7 +18,7 @@ Google Compute Engine SSH Operators -=================================================== +=================================== .. contents:: :depth: 1 @@ -27,7 +27,7 @@ Google Compute Engine SSH Operators Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:ComputeEngineSSHOperator: @@ -49,7 +49,7 @@ Please note that the target instance must allow tcp traffic on port 22. Below is the code to create the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_ssh.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_ssh.py :language: python :dedent: 4 :start-after: [START howto_execute_command_on_remote1] @@ -58,7 +58,7 @@ Below is the code to create the operator: You can also create the hook without project id - project id will be retrieved from the Google credentials used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_compute_ssh.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_compute_ssh.py :language: python :dedent: 4 :start-after: [START howto_execute_command_on_remote2] diff --git a/docs/howto/operator/google/cloud/dlp.rst b/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst similarity index 92% rename from docs/howto/operator/google/cloud/dlp.rst rename to docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst index 5c40f1a622fdd..aed5bb9d32fd1 100644 --- a/docs/howto/operator/google/cloud/dlp.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst @@ -27,7 +27,7 @@ elements to help you better manage the data that you collect, store, or use for Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst Info-Types ^^^^^^^^^^ @@ -41,7 +41,7 @@ Create Stored Info-Type To create a custom info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateStoredInfoTypeOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_info_type] @@ -74,7 +74,7 @@ Update Stored Info-Type To update a info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPUpdateStoredInfoTypeOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_update_info_type] @@ -89,7 +89,7 @@ Deleting Stored Info-Type To delete a info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteStoredInfoTypeOperator`. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_info_type] @@ -116,7 +116,7 @@ Creating Template To create a inspection template you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateInspectTemplateOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_inspect_template] @@ -143,7 +143,7 @@ Using Template To find potentially sensitive info using the inspection template we just created, we can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPInspectContentOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_use_inspect_template] @@ -165,7 +165,7 @@ Deleting Template To delete the template you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteInspectTemplateOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_inspect_template] @@ -240,7 +240,7 @@ Creating Job Trigger To create a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateJobTriggerOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_job_trigger] @@ -265,7 +265,7 @@ Updating Job Trigger To update a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPUpdateJobTriggerOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_update_job_trigger] @@ -279,7 +279,7 @@ Deleting Job Trigger To delete a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteJobTriggerOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dlp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dlp.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_job_trigger] diff --git a/docs/howto/operator/google/cloud/datacatalog.rst b/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst similarity index 86% rename from docs/howto/operator/google/cloud/datacatalog.rst rename to docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst index a0892f1ea1995..0d036773bf9d4 100644 --- a/docs/howto/operator/google/cloud/datacatalog.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst @@ -36,7 +36,7 @@ Google Cloud. It offers: Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudDataCatalogEntryOperators: @@ -63,7 +63,7 @@ operators. The ``CloudDataCatalogGetEntryOperator`` use Project ID, Entry Group ID, Entry ID to get the entry. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry] @@ -75,7 +75,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_result] @@ -83,7 +83,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The ``CloudDataCatalogLookupEntryOperator`` use the resource name to get the entry. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_linked_resource] @@ -95,7 +95,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_result] @@ -109,7 +109,7 @@ Creating an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator` operator create the entry. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs] @@ -121,7 +121,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs_result2] @@ -129,7 +129,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry ID can be read with the ``entry_id`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs_result] @@ -143,7 +143,7 @@ Updating an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator` operator update the entry. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_entry] @@ -161,7 +161,7 @@ Deleting a entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator` operator delete the entry. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_entry] @@ -190,7 +190,7 @@ Creating an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator` operator create the entry group. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group] @@ -202,7 +202,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group_result2] @@ -210,7 +210,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry group ID can be read with the ``entry_group_id`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group_result2] @@ -224,7 +224,7 @@ Getting an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator` operator get the entry group. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_group] @@ -236,7 +236,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_group_result] @@ -250,7 +250,7 @@ Deleting an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator` operator delete the entry group. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_entry_group] @@ -279,7 +279,7 @@ Creating a tag templates The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template] @@ -291,7 +291,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_result2] @@ -299,7 +299,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag template ID can be read with the ``tag_template_id`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_result] @@ -313,7 +313,7 @@ Deleting a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator` operator delete the tag template. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template] @@ -332,7 +332,7 @@ Getting a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_tag_template] @@ -344,7 +344,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_tag_template_result] @@ -358,7 +358,7 @@ Updating a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator` operator update the tag template. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag_template] @@ -387,7 +387,7 @@ Creating a tag on an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator` operator get the tag template. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag] @@ -399,7 +399,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_result2] @@ -407,7 +407,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag ID can be read with the ``tag_id`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group_result2] @@ -421,7 +421,7 @@ Updating an tag The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator` operator update the tag template. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag_template] @@ -439,7 +439,7 @@ Deleting an tag The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator` operator delete the tag template. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template] @@ -457,7 +457,7 @@ Listing an tags on an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogListTagsOperator` operator get list of the tags on the entry. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_list_tags] @@ -469,7 +469,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_list_tags_result] @@ -495,7 +495,7 @@ Creating a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator` operator get the tag template field. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field] @@ -507,7 +507,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field_result2] @@ -515,7 +515,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created field ID can be read with the ``tag_template_field_id`` key. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group_result2] @@ -529,7 +529,7 @@ Renaming a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator` operator rename the tag template field. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_rename_tag_template_field] @@ -547,7 +547,7 @@ Updating a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator` operator get the tag template field. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag_template_field] @@ -566,7 +566,7 @@ Deleting a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator` operator delete the tag template field. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template_field] @@ -587,7 +587,7 @@ operator searches Data Catalog for multiple resources like entries, tags that ma The ``query`` parameters should defined using `search syntax `__. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_search_catalog] @@ -599,7 +599,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datacatalog.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datacatalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_search_catalog_result] diff --git a/docs/howto/operator/google/cloud/datafusion.rst b/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst similarity index 90% rename from docs/howto/operator/google/cloud/datafusion.rst rename to docs/apache-airflow-providers-google/operators/cloud/datafusion.rst index 5a311519337b5..a6b067e648e3b 100644 --- a/docs/howto/operator/google/cloud/datafusion.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst @@ -33,7 +33,7 @@ and action. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudDataFusionRestartInstanceOperator: @@ -44,7 +44,7 @@ Restart DataFusion Instance To restart Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionRestartInstanceOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_restart_instance_operator] @@ -63,7 +63,7 @@ Delete DataFusion Instance To delete Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionDeleteInstanceOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_delete_instance_operator] @@ -83,7 +83,7 @@ Create DataFusion Instance To create Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionCreateInstanceOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_create_instance_operator] @@ -103,7 +103,7 @@ Update DataFusion Instance To update Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_update_instance_operator] @@ -122,7 +122,7 @@ Get DataFusion Instance To retrieve Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionGetInstanceOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_get_instance_operator] @@ -142,7 +142,7 @@ Create a DataFusion pipeline To create Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionCreatePipelineOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_create_pipeline] @@ -161,7 +161,7 @@ Start a DataFusion pipeline To start Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStartPipelineOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline] @@ -180,7 +180,7 @@ Stop a DataFusion pipeline To stop Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStopPipelineOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_stop_pipeline] @@ -199,7 +199,7 @@ Delete a DataFusion pipeline To delete Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionDeletePipelineOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_delete_pipeline] @@ -219,7 +219,7 @@ List DataFusion pipelines To list Data Fusion pipelines use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionListPipelinesOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datafusion.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_list_pipelines] diff --git a/docs/howto/operator/google/cloud/dataprep.rst b/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst similarity index 89% rename from docs/howto/operator/google/cloud/dataprep.rst rename to docs/apache-airflow-providers-google/operators/cloud/dataprep.rst index 699b6ccb2b0fe..8f8e6c0fe8e2a 100644 --- a/docs/howto/operator/google/cloud/dataprep.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst @@ -28,7 +28,7 @@ Before using Dataprep within Airflow you need to authenticate your account with To get connection Dataprep with Airflow you need Dataprep token. Please follow Dataprep `instructions `_ to do it. TOKEN should be added to the Connection in Airflow in JSON format. -You can check `how to do such connection `_. +You can check :doc:`apache-airflow:howto/connection/index` The DataprepRunJobGroupOperator will run specified job. Operator required a recipe id. To identify the recipe id please use `API documentation for runJobGroup `_ E.g. if the URL is /flows/10?recipe=7, the recipe id is 7. The recipe cannot be created via this operator. It can be created only via UI which is available `here `_. @@ -49,7 +49,7 @@ Set values for these fields: Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:DataprepRunJobGroupOperator: @@ -64,7 +64,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataprep.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_run_job_group_operator] @@ -82,7 +82,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataprep.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_get_jobs_for_job_group_operator] @@ -101,7 +101,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataprep.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_get_job_group_operator] diff --git a/docs/howto/operator/google/cloud/dataproc.rst b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst similarity index 84% rename from docs/howto/operator/google/cloud/dataproc.rst rename to docs/apache-airflow-providers-google/operators/cloud/dataproc.rst index 7b14a3fb08a89..02d6e8732001b 100644 --- a/docs/howto/operator/google/cloud/dataproc.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst @@ -32,7 +32,7 @@ For more information about the service visit `Dataproc production documentation Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:DataprocCreateClusterOperator: @@ -46,7 +46,7 @@ For more information about the available fields to pass when creating a cluster, A cluster configuration can look as followed: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -55,7 +55,7 @@ A cluster configuration can look as followed: With this configuration we can create the cluster: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator] @@ -69,7 +69,7 @@ For more information on updateMask and other parameters take a look at `Dataproc An example of a new cluster config and the updateMask: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_updatemask_cluster_operator] @@ -78,7 +78,7 @@ An example of a new cluster config and the updateMask: To update a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocUpdateClusterOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_update_cluster_operator] @@ -91,7 +91,7 @@ To delete a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDeleteClusterOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_cluster_operator] @@ -110,7 +110,7 @@ file system. You can specify a file:/// path to refer to a local file on a clust The job configuration can be submitted by using: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_submit_job_to_cluster_operator] @@ -125,7 +125,7 @@ There are more arguments to provide in the jobs than the examples show. For the Example of the configuration for a PySpark Job: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_pyspark_config] @@ -133,7 +133,7 @@ Example of the configuration for a PySpark Job: Example of the configuration for a SparkSQl Job: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_sparksql_config] @@ -141,7 +141,7 @@ Example of the configuration for a SparkSQl Job: Example of the configuration for a Spark Job: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_spark_config] @@ -149,7 +149,7 @@ Example of the configuration for a Spark Job: Example of the configuration for a Hive Job: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_hive_config] @@ -157,7 +157,7 @@ Example of the configuration for a Hive Job: Example of the configuration for a Hadoop Job: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_hadoop_config] @@ -165,7 +165,7 @@ Example of the configuration for a Hadoop Job: Example of the configuration for a Pig Job: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_pig_config] @@ -174,7 +174,7 @@ Example of the configuration for a Pig Job: Example of the configuration for a SparkR: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_dataproc.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_dataproc.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_sparkr_config] diff --git a/docs/howto/operator/google/cloud/datastore.rst b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst similarity index 81% rename from docs/howto/operator/google/cloud/datastore.rst rename to docs/apache-airflow-providers-google/operators/cloud/datastore.rst index a73f4265eac2d..35215531d5727 100644 --- a/docs/howto/operator/google/cloud/datastore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst @@ -31,7 +31,7 @@ For more information about the service visit Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudDatastoreExportEntitiesOperator: @@ -42,7 +42,7 @@ Export Entities To export entities from Google Cloud Datastore to Cloud Storage use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 4 :start-after: [START how_to_export_task] @@ -56,7 +56,7 @@ Import Entities To import entities from Cloud Storage to Google Cloud Datastore use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 4 :start-after: [START how_to_import_task] @@ -70,7 +70,7 @@ Allocate Ids To allocate IDs for incomplete keys use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreAllocateIdsOperator` -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 4 :start-after: [START how_to_allocate_ids] @@ -78,7 +78,7 @@ To allocate IDs for incomplete keys use An example of a partial keys required by the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 0 :start-after: [START how_to_keys_def] @@ -92,7 +92,7 @@ Begin transaction To begin a new transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreBeginTransactionOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 4 :start-after: [START how_to_begin_transaction] @@ -100,7 +100,7 @@ To begin a new transaction use An example of a transaction options required by the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 0 :start-after: [START how_to_transaction_def] @@ -114,7 +114,7 @@ Commit transaction To commit a transaction, optionally creating, deleting or modifying some entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCommitOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 4 :start-after: [START how_to_commit_task] @@ -122,7 +122,7 @@ use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCo An example of a commit information required by the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 0 :start-after: [START how_to_commit_def] @@ -136,7 +136,7 @@ Run query To run a query for entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRunQueryOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 4 :start-after: [START how_to_run_query] @@ -144,7 +144,7 @@ To run a query for entities use An example of a query required by the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 0 :start-after: [START how_to_query_def] @@ -158,7 +158,7 @@ Roll back transaction To roll back a transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRollbackOperator` -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_datastore.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_datastore.py :language: python :dedent: 4 :start-after: [START how_to_rollback_transaction] diff --git a/docs/howto/operator/google/cloud/functions.rst b/docs/apache-airflow-providers-google/operators/cloud/functions.rst similarity index 88% rename from docs/howto/operator/google/cloud/functions.rst rename to docs/apache-airflow-providers-google/operators/cloud/functions.rst index 006d2f66e71b0..b63cf509b53a5 100644 --- a/docs/howto/operator/google/cloud/functions.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/functions.rst @@ -27,7 +27,7 @@ Google Cloud Functions Operators Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudFunctionDeleteFunctionOperator: @@ -42,7 +42,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_functions.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_delete] @@ -51,7 +51,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/functions.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/functions.py :language: python :dedent: 4 :start-after: [START gcf_function_delete_template_fields] @@ -81,7 +81,7 @@ Arguments When a DAG is created, the default_args dictionary can be used to pass arguments common with other tasks: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_functions.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_functions.py :language: python :start-after: [START howto_operator_gcf_default_args] :end-before: [END howto_operator_gcf_default_args] @@ -105,19 +105,19 @@ Using the operator Depending on the combination of parameters, the Function's source code can be obtained from different sources: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_functions.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_functions.py :language: python :start-after: [START howto_operator_gcf_deploy_body] :end-before: [END howto_operator_gcf_deploy_body] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_functions.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_functions.py :language: python :start-after: [START howto_operator_gcf_deploy_variants] :end-before: [END howto_operator_gcf_deploy_variants] The code to create the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_functions.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_deploy] @@ -126,7 +126,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_functions.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_deploy_no_project_id] @@ -135,7 +135,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/functions.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/functions.py :language: python :dedent: 4 :start-after: [START gcf_function_deploy_template_fields] diff --git a/docs/howto/operator/google/cloud/gcs.rst b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst similarity index 88% rename from docs/howto/operator/google/cloud/gcs.rst rename to docs/apache-airflow-providers-google/operators/cloud/gcs.rst index b33f739e3318e..0853b0c37fca4 100644 --- a/docs/howto/operator/google/cloud/gcs.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst @@ -27,7 +27,7 @@ Google Cloud Storage Operators Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:GCSToBigQueryOperator: @@ -38,7 +38,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator` to execute a BigQuery load job. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py :language: python :start-after: [START howto_operator_gcs_to_bigquery] :end-before: [END howto_operator_gcs_to_bigquery] @@ -56,7 +56,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_bucket_create_acl_entry_task] @@ -65,7 +65,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/gcs.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/gcs.py :language: python :dedent: 4 :start-after: [START gcs_bucket_create_acl_template_fields] @@ -90,7 +90,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_object_create_acl_entry_task] @@ -99,7 +99,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/gcs.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/gcs.py :language: python :dedent: 4 :start-after: [START gcs_object_create_acl_template_fields] @@ -128,7 +128,7 @@ Deleting Bucket allows you to remove bucket object from the Google Cloud Storage It is performed through the :class:`~airflow.providers.google.cloud.operators.gcs.GCSDeleteBucketOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_delete_bucket] diff --git a/docs/howto/operator/google/cloud/index.rst b/docs/apache-airflow-providers-google/operators/cloud/index.rst similarity index 100% rename from docs/howto/operator/google/cloud/index.rst rename to docs/apache-airflow-providers-google/operators/cloud/index.rst diff --git a/docs/howto/operator/google/cloud/kubernetes_engine.rst b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst similarity index 91% rename from docs/howto/operator/google/cloud/kubernetes_engine.rst rename to docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst index 394ca0b6279e2..c32764ecf5375 100644 --- a/docs/howto/operator/google/cloud/kubernetes_engine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst @@ -31,7 +31,7 @@ consists of multiple machines (specifically, Compute Engine instances) grouped t Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst Manage GKE cluster ^^^^^^^^^^^^^^^^^^ @@ -47,7 +47,7 @@ Create GKE cluster Here is an example of a cluster definition: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py :language: python :start-after: [START howto_operator_gcp_gke_create_cluster_definition] :end-before: [END howto_operator_gcp_gke_create_cluster_definition] @@ -57,7 +57,7 @@ A dict object like this, or a definition, is required when creating a cluster with :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_cluster] @@ -72,7 +72,7 @@ To delete a cluster, use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator`. This would also delete all the nodes allocated to the cluster. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_cluster] @@ -121,7 +121,7 @@ is the path ``/airflow/xcom``. To provide values to the XCom, ensure your Pod wr ``return.json`` in the sidecar. The contents of this can then be used downstream in your DAG. Here is an example of it being used: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_pod_xcom] @@ -129,7 +129,7 @@ Here is an example of it being used: And then use it in other operators: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_xcom_result] diff --git a/docs/howto/operator/google/cloud/life_sciences.rst b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst similarity index 88% rename from docs/howto/operator/google/cloud/life_sciences.rst rename to docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst index 0d92d1c32b65c..1a0fb4cd90db5 100644 --- a/docs/howto/operator/google/cloud/life_sciences.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst @@ -31,7 +31,7 @@ and biomedical data at scale. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst Pipeline Configuration @@ -39,7 +39,7 @@ Pipeline Configuration In order to run the pipeline, it is necessary to configure the request body. Here is an example of the pipeline configuration with a single action. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_life_sciences.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_configure_simple_action_pipeline] @@ -47,7 +47,7 @@ Here is an example of the pipeline configuration with a single action. The pipeline can also be configured with multiple action. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_life_sciences.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_configure_multiple_action_pipeline] @@ -64,7 +64,7 @@ Use the :class:`~airflow.providers.google.cloud.operators.life_sciences.LifeSciencesRunPipelineOperator` to execute pipelines. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_life_sciences.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_run_pipeline] diff --git a/docs/howto/operator/google/cloud/mlengine.rst b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst similarity index 87% rename from docs/howto/operator/google/cloud/mlengine.rst rename to docs/apache-airflow-providers-google/operators/cloud/mlengine.rst index 0b3dd66f816ed..cd986c07eaae1 100644 --- a/docs/howto/operator/google/cloud/mlengine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst @@ -44,7 +44,7 @@ This creates a virtual machine that can run code specified in the trainer file, contains the main application code. A job can be initiated with the :class:`~airflow.providers.google.cloud.operators.mlengine.MLEngineStartTrainingJobOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_training] @@ -59,7 +59,7 @@ A model is a container that can hold multiple model versions. A new model can be The ``model`` field should be defined with a dictionary containing the information about the model. ``name`` is a required field in this dictionary. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_create_model] @@ -73,7 +73,7 @@ The :class:`~airflow.providers.google.cloud.operators.mlengine.MLEngineGetModelO can be used to obtain a model previously created. To obtain the correct model, ``model_name`` must be defined in the operator. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_get_model] @@ -84,7 +84,7 @@ fields to dynamically determine their values. The result are saved to :ref:`XCom allowing them to be used by other operators. In this case, the :class:`~airflow.operators.bash.BashOperator` is used to print the model information. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_print_model] @@ -100,7 +100,7 @@ The model must be specified by ``model_name``, and the ``version`` parameter sho all the information about the version. Within the ``version`` parameter’s dictionary, the ``name`` field is required. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_create_version1] @@ -109,7 +109,7 @@ required. The :class:`~airflow.providers.google.cloud.operators.mlengine.MLEngineCreateVersionOperator` can also be used to create more versions with varying parameters. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_create_version2] @@ -124,7 +124,7 @@ By default, the model code will run using the default model version. 
You can set :class:`~airflow.providers.google.cloud.operators.mlengine.MLEngineSetDefaultVersionOperator` by specifying the ``model_name`` and ``version_name`` parameters. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_default_version] @@ -134,7 +134,7 @@ To list the model versions available, use the :class:`~airflow.providers.google.cloud.operators.mlengine.MLEngineListVersionsOperator` while specifying the ``model_name`` parameter. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_list_versions] @@ -145,7 +145,7 @@ fields to dynamically determine their values. The result are saved to :ref:`XCom allowing them to be used by other operators. In this case, the :class:`~airflow.operators.bash.BashOperator` is used to print the version information. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_print_versions] @@ -160,7 +160,7 @@ A Google Cloud AI Platform prediction job can be started with the For specifying the model origin, you need to provide either the ``model_name``, ``uri``, or ``model_name`` and ``version_name``. If you do not provide the ``version_name``, the operator will use the default model version. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_get_prediction] @@ -175,7 +175,7 @@ A model version can be deleted with the :class:`~airflow.providers.google.cloud.operators.mlengine.MLEngineDeleteVersionOperator` by the ``version_name`` and ``model_name`` parameters. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_delete_version] @@ -185,7 +185,7 @@ You can also delete a model with the :class:`~airflow.providers.google.cloud.operators.mlengine.MLEngineDeleteModelOperator` by providing the ``model_name`` parameter. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_delete_model] @@ -197,7 +197,7 @@ To evaluate a prediction and model, specify a metric function to generate a summ the evaluation of the model. This function receives a dictionary derived from a json in the batch prediction result, then returns a tuple of metrics. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_get_metric] @@ -207,7 +207,7 @@ To evaluate a prediction and model, it’s useful to have a function to validate This function receives a dictionary of the averaged metrics the function above generated. It then raises an exception if a task fails or should not proceed. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_validate_error] @@ -218,7 +218,7 @@ Prediction results and a model summary can be generated through a function such It makes predictions using the specified inputs and then summarizes and validates the result. The functions created above should be passed in through the ``metric_fn_and_keys`` and ``validate_fn`` fields. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mlengine.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_evaluate] diff --git a/docs/howto/operator/google/cloud/natural_language.rst b/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst similarity index 87% rename from docs/howto/operator/google/cloud/natural_language.rst rename to docs/apache-airflow-providers-google/operators/cloud/natural_language.rst index c25c2942430d8..8ca2b7ac326fe 100644 --- a/docs/howto/operator/google/cloud/natural_language.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst @@ -35,7 +35,7 @@ messaging app. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudNaturalLanguageDocuments: @@ -48,14 +48,14 @@ representing text. Here is an example of document with text provided as a string: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :start-after: [START howto_operator_gcp_natural_language_document_text] :end-before: [END howto_operator_gcp_natural_language_document_text] In addition to supplying string, a document can refer to content stored in Google Cloud Storage. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :start-after: [START howto_operator_gcp_natural_language_document_gcs] :end-before: [END howto_operator_gcp_natural_language_document_gcs] @@ -70,7 +70,7 @@ public figures, landmarks, etc.), and returns information about those entities. Entity analysis is performed with the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entities] @@ -81,7 +81,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entities_result] @@ -98,7 +98,7 @@ as positive, negative, or neutral. 
Sentiment analysis is performed through the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entity_sentiment] @@ -109,7 +109,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entity_sentiment_result] @@ -127,7 +127,7 @@ through the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_sentiment] @@ -138,7 +138,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_sentiment_result] @@ -155,7 +155,7 @@ content in a document, use the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator` operator. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_classify_text] @@ -166,7 +166,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_natural_language.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_classify_text_result] diff --git a/docs/howto/operator/google/cloud/pubsub.rst b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst similarity index 85% rename from docs/howto/operator/google/cloud/pubsub.rst rename to docs/apache-airflow-providers-google/operators/cloud/pubsub.rst index 295ad222ec5ff..75e865358c317 100644 --- a/docs/howto/operator/google/cloud/pubsub.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst @@ -35,7 +35,7 @@ By decoupling senders and receivers Google Cloud PubSub allows developers to com Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. 
_howto/operator:PubSubCreateTopicOperator: @@ -45,7 +45,7 @@ Creating a PubSub topic The PubSub topic is a named resource to which messages are sent by publishers. The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubCreateTopicOperator` operator creates a topic. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_create_topic] :end-before: [END howto_operator_gcp_pubsub_create_topic] @@ -60,7 +60,7 @@ A ``Subscription`` is a named resource representing the stream of messages from to be delivered to the subscribing application. The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubCreateSubscriptionOperator` operator creates the subscription. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_create_subscription] :end-before: [END howto_operator_gcp_pubsub_create_subscription] @@ -74,7 +74,7 @@ Publishing PubSub messages A ``Message`` is a combination of data and (optional) attributes that a publisher sends to a topic and is eventually delivered to subscribers. The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubPublishMessageOperator` operator would publish messages. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_publish] :end-before: [END howto_operator_gcp_pubsub_publish] @@ -87,24 +87,24 @@ Pulling messages from a PubSub subscription The :class:`~airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor` sensor pulls messages from a PubSub subscription and pass them through XCom. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_message_with_sensor] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_sensor] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_message_with_operator] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_operator] To pull messages from XCom use the :class:`~airflow.operators.bash.BashOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_messages_result_cmd] :end-before: [END howto_operator_gcp_pubsub_pull_messages_result_cmd] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_messages_result] :end-before: [END howto_operator_gcp_pubsub_pull_messages_result] @@ -117,7 +117,7 @@ Deleting a PubSub subscription The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubDeleteSubscriptionOperator` operator deletes the subscription. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_unsubscribe] :end-before: [END howto_operator_gcp_pubsub_unsubscribe] @@ -130,7 +130,7 @@ Deleting a PubSub topic The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubDeleteTopicOperator` operator deletes topic. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_pubsub.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_delete_topic] :end-before: [END howto_operator_gcp_pubsub_delete_topic] diff --git a/docs/howto/operator/google/cloud/spanner.rst b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst similarity index 88% rename from docs/howto/operator/google/cloud/spanner.rst rename to docs/apache-airflow-providers-google/operators/cloud/spanner.rst index 81c299564647f..d4b6fbc63c6ca 100644 --- a/docs/howto/operator/google/cloud/spanner.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst @@ -27,7 +27,7 @@ Google Cloud Spanner Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. 
_howto/operator:SpannerDeployInstanceOperator: @@ -45,7 +45,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_spanner.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_deploy] @@ -54,7 +54,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_deploy_template_fields] @@ -84,7 +84,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_spanner.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_delete] @@ -93,7 +93,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_delete_template_fields] @@ -124,7 +124,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_spanner.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_deploy] @@ -133,7 +133,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_database_deploy_template_fields] @@ -168,13 +168,13 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_spanner.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update] :end-before: [END howto_operator_spanner_database_update] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_spanner.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update_idempotent] @@ -183,7 +183,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_database_update_template_fields] @@ -211,7 +211,7 @@ Using the operator You can create the operator with or without project id. 
If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_spanner.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_query] @@ -220,7 +220,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_query_template_fields] @@ -250,7 +250,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_spanner.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_delete] @@ -259,7 +259,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/spanner.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_delete_template_fields] diff --git a/docs/howto/operator/google/cloud/speech_to_text.rst b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst similarity index 85% rename from docs/howto/operator/google/cloud/speech_to_text.rst rename to docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst index 1b652980b03de..7ba5fce6d5ebb 100644 --- a/docs/howto/operator/google/cloud/speech_to_text.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst @@ -22,7 +22,7 @@ Google Cloud Speech to Text Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudSpeechToTextRecognizeSpeechOperator: @@ -42,14 +42,14 @@ google.cloud.speech_v1.types module for more information, see: https://googleapis.github.io/google-cloud-python/latest/speech/gapic/v1/api.html#google.cloud.speech_v1.SpeechClient.recognize -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_speech_to_text.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_speech_to_text.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] filename is a simple string argument: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_speech_to_text.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_speech_to_text.py :language: python :start-after: [START howto_operator_speech_to_text_api_arguments] :end-before: [END howto_operator_speech_to_text_api_arguments] @@ -57,7 +57,7 @@ filename is a simple string argument: Using the operator """""""""""""""""" -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_speech_to_text.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_speech_to_text.py :language: python :dedent: 4 :start-after: [START howto_operator_speech_to_text_recognize] @@ -66,7 +66,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/speech_to_text.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/speech_to_text.py :language: python :dedent: 4 :start-after: [START gcp_speech_to_text_synthesize_template_fields] diff --git a/docs/howto/operator/google/cloud/stackdriver.rst b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst similarity index 89% rename from docs/howto/operator/google/cloud/stackdriver.rst rename to docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst index 43e1b75d5d33f..fb0f38d0b8fa6 100644 --- a/docs/howto/operator/google/cloud/stackdriver.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst @@ -27,7 +27,7 @@ Google Cloud Stackdriver Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:StackdriverListAlertPoliciesOperator: @@ -44,7 +44,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_alert_policy] @@ -64,7 +64,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. 
If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_alert_policy] @@ -84,7 +84,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_alert_policy] @@ -105,7 +105,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_upsert_alert_policy] @@ -124,7 +124,7 @@ Using the operator The name of the alert to be deleted should be given in the format projects//alertPolicies/ -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_alert_policy] @@ -144,7 +144,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_notification_channel] @@ -164,7 +164,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_notification_channel] @@ -184,7 +184,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -205,7 +205,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -224,7 +224,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. 
If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_stackdriver.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_notification_channel] diff --git a/docs/howto/operator/google/cloud/text_to_speech.rst b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst similarity index 85% rename from docs/howto/operator/google/cloud/text_to_speech.rst rename to docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst index 2b257b08fab17..07b6285b1db97 100644 --- a/docs/howto/operator/google/cloud/text_to_speech.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst @@ -22,7 +22,7 @@ Google Cloud Text to Speech Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudTextToSpeechSynthesizeOperator: @@ -42,14 +42,14 @@ The ``input``, ``voice`` and ``audio_config`` arguments need to be dicts or obje for more information, see: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/api.html#google.cloud.texttospeech_v1.TextToSpeechClient.synthesize_speech -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_text_to_speech.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] The ``filename`` argument is a simple string argument: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_text_to_speech.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_gcp_filename] :end-before: [END howto_operator_text_to_speech_gcp_filename] @@ -57,7 +57,7 @@ The ``filename`` argument is a simple string argument: Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_text_to_speech.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_text_to_speech.py :language: python :dedent: 4 :start-after: [START howto_operator_text_to_speech_synthesize] @@ -66,7 +66,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/text_to_speech.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/text_to_speech.py :language: python :dedent: 4 :start-after: [START gcp_text_to_speech_synthesize_template_fields] diff --git a/docs/howto/operator/google/cloud/translate.rst b/docs/apache-airflow-providers-google/operators/cloud/translate.rst similarity index 87% rename from docs/howto/operator/google/cloud/translate.rst rename to docs/apache-airflow-providers-google/operators/cloud/translate.rst index 3eb188d39729a..d8176d6709c53 100644 --- a/docs/howto/operator/google/cloud/translate.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/translate.rst @@ -27,7 +27,7 @@ Google Cloud Translate Operators Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudTranslateTextOperator: @@ -44,7 +44,7 @@ Using the operator Basic usage of the operator: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_translate.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_translate.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_text] @@ -53,7 +53,7 @@ Basic usage of the operator: The result of translation is available as dictionary or array of dictionaries accessible via the usual XCom mechanisms of Airflow: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_translate.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_translate.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_access] @@ -63,7 +63,7 @@ XCom mechanisms of Airflow: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/translate.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/translate.py :language: python :dedent: 4 :start-after: [START translate_template_fields] diff --git a/docs/howto/operator/google/cloud/translate_speech.rst b/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst similarity index 88% rename from docs/howto/operator/google/cloud/translate_speech.rst rename to docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst index 0e800acac9dfe..bd6b107bc3745 100644 --- a/docs/howto/operator/google/cloud/translate_speech.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst @@ -25,7 +25,7 @@ Google Cloud Speech Translate Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudTranslateSpeechOperator: @@ -47,7 +47,7 @@ for more information, see: https://googleapis.github.io/google-cloud-python/late Arguments for translation need to be specified. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_translate_speech.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_translate_speech.py :language: python :start-after: [START howto_operator_translate_speech_arguments] :end-before: [END howto_operator_translate_speech_arguments] @@ -56,7 +56,7 @@ Arguments for translation need to be specified. Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_translate_speech.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_translate_speech.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_speech] @@ -65,7 +65,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/translate_speech.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/translate_speech.py :language: python :dedent: 4 :start-after: [START translate_speech_template_fields] diff --git a/docs/howto/operator/google/cloud/video_intelligence.rst b/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst similarity index 84% rename from docs/howto/operator/google/cloud/video_intelligence.rst rename to docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst index 5c76e5c415851..0877729216996 100644 --- a/docs/howto/operator/google/cloud/video_intelligence.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst @@ -42,7 +42,7 @@ Google Cloud Video Intelligence Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudVideoIntelligenceDetectVideoLabelsOperator: @@ -59,12 +59,12 @@ Using the operator Input uri is an uri to a file in Google Cloud Storage -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :start-after: [START howto_operator_video_intelligence_other_args] :end-before: [END howto_operator_video_intelligence_other_args] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_labels] @@ -72,7 +72,7 @@ Input uri is an uri to a file in Google Cloud Storage You can use the annotation output via Xcom: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_labels_result] @@ -81,7 +81,7 @@ You can use the annotation output via Xcom: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/video_intelligence.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py :language: python :dedent: 4 :start-after: [START gcp_video_intelligence_detect_labels_template_fields] @@ -108,7 +108,7 @@ Arguments Input uri is an uri to a file in Google Cloud Storage -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :start-after: [START howto_operator_video_intelligence_other_args] :end-before: [END howto_operator_video_intelligence_other_args] @@ -116,7 +116,7 @@ Input uri is an uri to a file in Google Cloud Storage Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_explicit_content] @@ -124,7 +124,7 @@ Using the operator You can use the annotation output via Xcom: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_explicit_content_result] @@ -133,7 +133,7 @@ You can use the annotation output via Xcom: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/video_intelligence.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py :language: python :dedent: 4 :start-after: [START gcp_video_intelligence_detect_explicit_content_template_fields] @@ -160,7 +160,7 @@ Arguments Input uri is an uri to a file in Google Cloud Storage -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :start-after: [START howto_operator_video_intelligence_other_args] :end-before: [END howto_operator_video_intelligence_other_args] @@ -168,7 +168,7 @@ Input uri is an uri to a file in Google Cloud Storage Using the operator """""""""""""""""" -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_video_shots] @@ -176,7 +176,7 @@ Using the operator You can use the annotation output via Xcom: -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_video_intelligence.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_video_shots_result] @@ -185,7 +185,7 @@ You can use the annotation output via Xcom: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/video_intelligence.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py :language: python :dedent: 4 :start-after: [START gcp_video_intelligence_detect_video_shots_template_fields] diff --git a/docs/howto/operator/google/cloud/vision.rst b/docs/apache-airflow-providers-google/operators/cloud/vision.rst similarity index 81% rename from docs/howto/operator/google/cloud/vision.rst rename to docs/apache-airflow-providers-google/operators/cloud/vision.rst index 00429d1e25636..0d9810a5c11ed 100644 --- a/docs/howto/operator/google/cloud/vision.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/vision.rst @@ -27,7 +27,7 @@ Google Cloud Vision Operators Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudVisionAddProductToProductSetOperator: @@ -46,17 +46,17 @@ We are using the :class:`~google.cloud.vision_v1.types.Product`, :class:`~google.cloud.vision_v1.types.ProductSet` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] @@ -64,7 +64,7 @@ Google libraries: If ``product_set_id`` and ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_add_product_to_product_set] @@ -72,7 +72,7 @@ If ``product_set_id`` and ``product_id`` was generated by the API it can be extr Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_add_product_to_product_set_2] @@ -82,7 +82,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_add_product_to_product_set_template_fields] @@ -111,18 +111,18 @@ Using the operator We are using the :class:`~google.cloud.vision.enums` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_enums_import] :end-before: [END howto_operator_vision_enums_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_annotate_image] @@ -130,7 +130,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_annotate_image_result] @@ -140,7 +140,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_annotate_image_template_fields] @@ -173,24 +173,24 @@ Using the operator We are using the ``Product`` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product] :end-before: [END howto_operator_vision_product] The ``product_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_create] @@ -198,7 +198,7 @@ The ``product_id`` argument can be omitted (it will be generated by the API): Or it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_create_2] @@ -208,7 +208,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_create_template_fields] @@ -243,7 +243,7 @@ Using the operator If ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_delete] @@ -251,7 +251,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_delete_2] @@ -260,7 +260,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_delete_template_fields] @@ -291,7 +291,7 @@ Using the operator If ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_get] @@ -299,7 +299,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_get_2] @@ -308,7 +308,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_get_template_fields] @@ -335,24 +335,24 @@ Using the operator We are using the ``ProductSet`` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_set] :end-before: [END howto_operator_vision_product_set] The ``product_set_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_create] @@ -360,7 +360,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API) Or it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_create_2] @@ -370,7 +370,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_create_template_fields] @@ -399,7 +399,7 @@ Using the operator If ``product_set_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_delete] @@ -407,7 +407,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_delete_2] @@ -416,7 +416,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_delete_template_fields] @@ -443,7 +443,7 @@ Using the operator If ``product_set_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_get] @@ -451,7 +451,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_get_2] @@ -460,7 +460,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_get_template_fields] @@ -499,12 +499,12 @@ Using the operator We are using the ``ProductSet`` object from the Google Cloud Vision library: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_set] :end-before: [END howto_operator_vision_product_set] @@ -513,7 +513,7 @@ Initialization of the task: If ``product_set_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_update] @@ -521,7 +521,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_update_2] @@ -530,7 +530,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_update_template_fields] @@ -580,19 +580,19 @@ Using the operator We are using the ``Product`` object from the Google Cloud Vision library: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product] :end-before: [END howto_operator_vision_product] If ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_update] @@ -600,7 +600,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_update_2] @@ -609,7 +609,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_update_template_fields] @@ -636,24 +636,24 @@ Using the operator We are using the :class:`~google.cloud.vision_v1.types.ReferenceImage` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_reference_image_import] :end-before: [END howto_operator_vision_reference_image_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_reference_image] :end-before: [END howto_operator_vision_reference_image] The ``product_set_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_create] @@ -661,7 +661,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API) Or it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_create_2] @@ -671,7 +671,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_reference_image_create_template_fields] @@ -698,24 +698,24 @@ Using the operator We are using the :class:`~google.cloud.vision_v1.types.ReferenceImage` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_reference_image_import] :end-before: [END howto_operator_vision_reference_image_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_reference_image] :end-before: [END howto_operator_vision_reference_image] The ``product_set_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_delete] @@ -723,7 +723,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API) Or it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_delete_2] @@ -733,7 +733,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_reference_image_create_template_fields] @@ -762,17 +762,17 @@ We are using the :class:`~google.cloud.vision_v1.types.Product`, :class:`~google.cloud.vision_v1.types.ProductSet` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] @@ -780,7 +780,7 @@ Google libraries: If ``product_set_id`` and ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_remove_product_from_product_set] @@ -788,7 +788,7 @@ If ``product_set_id`` and ``product_id`` was generated by the API it can be extr Otherwise it can be specified explicitly: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_remove_product_from_product_set_2] @@ -798,7 +798,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_remove_product_from_product_set_template_fields] @@ -828,12 +828,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_text] @@ -841,7 +841,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_text_result] @@ -851,7 +851,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_text_set_template_fields] @@ -880,12 +880,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_document_detect_text] @@ -893,7 +893,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_document_detect_text_result] @@ -903,7 +903,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_document_detect_text_set_template_fields] @@ -933,12 +933,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_labels] @@ -946,7 +946,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_labels_result] @@ -956,7 +956,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_labels_template_fields] @@ -985,12 +985,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_safe_search] @@ -998,7 +998,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_vision.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_vision.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_safe_search_result] @@ -1008,7 +1008,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../airflow/providers/google/cloud/operators/vision.py +.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_safe_search_template_fields] diff --git a/docs/howto/operator/google/firebase/firestore.rst b/docs/apache-airflow-providers-google/operators/firebase/firestore.rst similarity index 93% rename from docs/howto/operator/google/firebase/firestore.rst rename to docs/apache-airflow-providers-google/operators/firebase/firestore.rst index 15fdfe0f08222..c29448a098978 100644 --- a/docs/howto/operator/google/firebase/firestore.rst +++ b/docs/apache-airflow-providers-google/operators/firebase/firestore.rst @@ -34,7 +34,7 @@ Cloud Functions. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:CloudFirestoreExportDatabaseOperator: @@ -45,7 +45,7 @@ Export database Exports a copy of all or a subset of documents from Google Cloud Firestore to Google Cloud Storage is performed with the :class:`~airflow.providers.google.firebase.operators.firestore.CloudFirestoreExportDatabaseOperator` operator. -.. exampleinclude:: /../airflow/providers/google/firebase/example_dags/example_firestore.py +.. 
exampleinclude:: /../../airflow/providers/google/firebase/example_dags/example_firestore.py :language: python :dedent: 4 :start-after: [START howto_operator_export_database_to_gcs] diff --git a/docs/howto/operator/google/index.rst b/docs/apache-airflow-providers-google/operators/index.rst similarity index 100% rename from docs/howto/operator/google/index.rst rename to docs/apache-airflow-providers-google/operators/index.rst diff --git a/docs/howto/operator/google/marketing_platform/analytics.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/analytics.rst similarity index 89% rename from docs/howto/operator/google/marketing_platform/analytics.rst rename to docs/apache-airflow-providers-google/operators/marketing_platform/analytics.rst index aae41e2a5c0b9..55f4c4cac8fa5 100644 --- a/docs/howto/operator/google/marketing_platform/analytics.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/analytics.rst @@ -30,7 +30,7 @@ For more information about the Google Analytics 360 API check Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:GoogleAnalyticsListAccountsOperator: @@ -40,7 +40,7 @@ List the Accounts To list accounts from Analytics you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsListAccountsOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_analytics.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_analytics.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_list_accounts_operator] @@ -58,7 +58,7 @@ Returns a web property-Google Ads link to which the user has access. 
To list web property-Google Ads link you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsGetAdsLinkOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_analytics.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_analytics.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_get_ads_link_operator] @@ -76,7 +76,7 @@ Operator returns a list of entity Google Ads links. To list Google Ads links you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsRetrieveAdsLinksListOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_analytics.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_analytics.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_retrieve_ads_links_list_operator] diff --git a/docs/howto/operator/google/marketing_platform/campaign_manager.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst similarity index 89% rename from docs/howto/operator/google/marketing_platform/campaign_manager.rst rename to docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst index 5e5cda3ba74b6..a69700beeec68 100644 --- a/docs/howto/operator/google/marketing_platform/campaign_manager.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst @@ -30,7 +30,7 @@ reports. For more information about the Campaign Manager API check Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. 
_howto/operator:GoogleCampaignManagerDeleteReportOperator: @@ -41,7 +41,7 @@ To delete Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerDeleteReportOperator`. It deletes a report by its unique ID. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_delete_report_operator] @@ -59,7 +59,7 @@ Downloading a report The :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerDownloadReportOperator`. allows you to download a Campaign Manager to Google Cloud Storage bucket. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_get_report_operator] @@ -77,7 +77,7 @@ Waiting for a report Report are generated asynchronously. To wait for report to be ready for downloading you can use :class:`~airflow.providers.google.marketing_platform.sensors.campaign_manager.GoogleCampaignManagerReportSensor`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_wait_for_operation] @@ -96,7 +96,7 @@ To insert a Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerInsertReportOperator`. Running this operator creates a new report. -.. 
exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_insert_report_operator] @@ -116,7 +116,7 @@ Running a report To run Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerRunReportOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_run_report_operator] @@ -135,7 +135,7 @@ Inserting a conversions To insert Campaign Manager conversions you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchInsertConversionsOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_insert_conversions] @@ -154,7 +154,7 @@ Updating a conversions To update Campaign Manager conversions you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchUpdateConversionsOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py +.. 
exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_update_conversions] diff --git a/docs/howto/operator/google/marketing_platform/display_video.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst similarity index 88% rename from docs/howto/operator/google/marketing_platform/display_video.rst rename to docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst index fd5f6880c27d4..42b0328ec1c2e 100644 --- a/docs/howto/operator/google/marketing_platform/display_video.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst @@ -27,7 +27,7 @@ campaign management features you need. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:GoogleDisplayVideo360CreateReportOperator: @@ -37,7 +37,7 @@ Creating a report To create Display&Video 360 report use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360CreateReportOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_createquery_report_operator] @@ -57,7 +57,7 @@ Deleting a report To delete Display&Video 360 report use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DeleteReportOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. 
exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_deletequery_report_operator] @@ -75,7 +75,7 @@ Waiting for report To wait for the report use :class:`~airflow.providers.google.marketing_platform.sensors.display_video.GoogleDisplayVideo360ReportSensor`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_wait_report_operator] @@ -93,7 +93,7 @@ Downloading a report To download a report to GCS bucket use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadReportOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_getquery_report_operator] @@ -112,7 +112,7 @@ Running a report To run Display&Video 360 report use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360RunReportOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_runquery_report_operator] @@ -141,7 +141,7 @@ The operator accepts body request: To download line items in CSV format report use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadLineItemsOperator`. -.. 
exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_download_line_items_operator] @@ -160,7 +160,7 @@ Upload line items To run Display&Video 360 uploading line items use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_upload_line_items_operator] @@ -178,7 +178,7 @@ Create SDF download task To create SDF download task use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360CreateSDFDownloadTaskOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_create_sdf_download_task_operator] @@ -197,7 +197,7 @@ Save SDF files in the Google Cloud Storage To save SDF files and save them in the Google Cloud Storage use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360SDFtoGCSOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. 
exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_save_sdf_in_gcs_operator] @@ -215,7 +215,7 @@ Waiting for SDF operation Wait for SDF operation is executed by: :class:`~airflow.providers.google.marketing_platform.sensors.display_video.GoogleDisplayVideo360GetSDFDownloadOperationSensor`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_wait_for_operation_sensor] diff --git a/docs/howto/operator/google/marketing_platform/index.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/index.rst similarity index 100% rename from docs/howto/operator/google/marketing_platform/index.rst rename to docs/apache-airflow-providers-google/operators/marketing_platform/index.rst diff --git a/docs/howto/operator/google/marketing_platform/search_ads.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst similarity index 88% rename from docs/howto/operator/google/marketing_platform/search_ads.rst rename to docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst index 2a152e055770d..fe490eea2f4ca 100644 --- a/docs/howto/operator/google/marketing_platform/search_ads.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst @@ -28,7 +28,7 @@ For more information check `Google Search Ads `, which allows it to be used by other operators: -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_search_ads.py +.. 
exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_get_report_id] @@ -64,7 +64,7 @@ Awaiting for a report To wait for a report to be ready for download use :class:`~airflow.providers.google.marketing_platform.sensors.search_ads.GoogleSearchAdsReportSensor`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_search_ads.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_get_report_operator] @@ -82,7 +82,7 @@ Downloading a report To download a Search Ads report to Google Cloud Storage bucket use the :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsDownloadReportOperator`. -.. exampleinclude:: /../airflow/providers/google/marketing_platform/example_dags/example_search_ads.py +.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_getfile_report_operator] diff --git a/docs/howto/operator/google/suite/sheets.rst b/docs/apache-airflow-providers-google/operators/suite/sheets.rst similarity index 89% rename from docs/howto/operator/google/suite/sheets.rst rename to docs/apache-airflow-providers-google/operators/suite/sheets.rst index f79d97792eef4..c49b84bdbac22 100644 --- a/docs/howto/operator/google/suite/sheets.rst +++ b/docs/apache-airflow-providers-google/operators/suite/sheets.rst @@ -39,7 +39,7 @@ For more information check `official documentation ` with To get the URL of newly created spreadsheet use XCom value: -.. exampleinclude:: /../airflow/providers/google/suite/example_dags/example_sheets.py +.. 
exampleinclude:: /../../airflow/providers/google/suite/example_dags/example_sheets.py :language: python :dedent: 4 :start-after: [START print_spreadsheet_url] diff --git a/docs/howto/operator/google/transfer/azure_fileshare_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst similarity index 95% rename from docs/howto/operator/google/transfer/azure_fileshare_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst index 3d8738678da9f..9eac6e3d96614 100644 --- a/docs/howto/operator/google/transfer/azure_fileshare_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst @@ -38,7 +38,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_azure_fileshare_to_gcs_basic] diff --git a/docs/howto/operator/google/transfer/facebook_ads_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst similarity index 90% rename from docs/howto/operator/google/transfer/facebook_ads_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst index deb8bef137167..c58a750b3e048 100644 --- a/docs/howto/operator/google/transfer/facebook_ads_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst @@ -27,7 +27,7 @@ Facebook Ads To GCS Operators Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. 
_howto/operator:FacebookAdsReportToGcsOperator: @@ -38,7 +38,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator` to execute a Facebook ads report fetch and load to GCS. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py :language: python :start-after: [START howto_operator_facebook_ads_to_gcs] :end-before: [END howto_operator_facebook_ads_to_gcs] diff --git a/docs/howto/operator/google/transfer/gcs_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst similarity index 90% rename from docs/howto/operator/google/transfer/gcs_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst index 0077681b0b3fc..317397b901687 100644 --- a/docs/howto/operator/google/transfer/gcs_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst @@ -49,7 +49,7 @@ from the source after they are transferred to the sink. Source objects can be specified using include and exclusion prefixes, as well as based on the file modification date. -If you need information on how to use it, look at the guide: :doc:`/howto/operator/google/cloud/cloud_storage_transfer_service` +If you need information on how to use it, look at the guide: :doc:`/operators/cloud/cloud_storage_transfer_service` Local transfer ~~~~~~~~~~~~~~ @@ -61,7 +61,7 @@ In the next section they will be described. Prerequisite Tasks ------------------ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst Operators @@ -94,7 +94,7 @@ Copy single file The following example would copy a single file, ``OBJECT_1`` from the ``BUCKET_1_SRC`` GCS bucket to the ``BUCKET_1_DST`` bucket. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file] @@ -105,7 +105,7 @@ Copy multiple files There are several ways to copy multiple files, various examples of which are presented following. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_wildcard] @@ -115,7 +115,7 @@ The ``source_object`` value may contain one wild card, denoted as "*". All files be copied. In this example, all root level files ending with ``.txt`` in ``BUCKET_1_SRC`` will be copied to the ``data`` folder in ``BUCKET_1_DST``, with file names unchanged. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_delimiter] @@ -125,7 +125,7 @@ The delimiter filed may be specified to select any source files starting with `` value supplied to ``delimiter``. This example uses the ``delimiter`` value to implement the same functionality as the prior example. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list] @@ -141,7 +141,7 @@ Move single file Supplying ``True`` to the ``move`` argument causes the operator to delete ``source_object`` once the copy is complete. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file_move] @@ -153,7 +153,7 @@ Move multiple files Multiple files may be moved by supplying ``True`` to the ``move`` argument. The same rules concerning wild cards and the ``delimiter`` argument apply to moves as well as copies. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list_move] @@ -186,7 +186,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_bucket] @@ -199,7 +199,7 @@ This example will ensure all files in ``BUCKET_1_SRC``, including any in subdire ``BUCKET_1_DST``. It will overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_full_bucket] @@ -212,7 +212,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``subdir`` folder in ``BUCKET_1_DST``. 
It will not overwrite identically named files in ``BUCKET_1_DST/subdir`` if they already exist and it will not delete any files in ``BUCKET_1_DST/subdir`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_to_subdir] @@ -225,7 +225,7 @@ This example will ensure all files in ``BUCKET_1_SRC/subdir``, including any in in ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist and it will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC/subdir``. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_sync_from_subdir] diff --git a/docs/howto/operator/google/transfer/gcs_to_gdrive.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst similarity index 89% rename from docs/howto/operator/google/transfer/gcs_to_gdrive.rst rename to docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst index 83bc5aa4b09d0..4c43f8f1f874e 100644 --- a/docs/howto/operator/google/transfer/gcs_to_gdrive.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst @@ -33,7 +33,7 @@ document editor, file sharing mechanisms. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:GCSToGoogleDriveOperator: @@ -52,7 +52,7 @@ Copy single files The following Operator would copy a single file. -.. exampleinclude:: /../airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py +.. 
exampleinclude:: /../../airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_single_file] @@ -63,7 +63,7 @@ Copy multiple files The following Operator would copy all the multiples files (i.e. using wildcard). -.. exampleinclude:: /../airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py +.. exampleinclude:: /../../airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_files] @@ -75,7 +75,7 @@ Move files Using the ``move_object`` parameter allows you to move the files. After copying the file to Google Drive, the original file from the bucket is deleted. -.. exampleinclude:: /../airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py +.. exampleinclude:: /../../airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_move_files] diff --git a/docs/howto/operator/google/transfer/gcs_to_local.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst similarity index 91% rename from docs/howto/operator/google/transfer/gcs_to_local.rst rename to docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst index 057dd5bcb3da8..3f8cf83df5ff6 100644 --- a/docs/howto/operator/google/transfer/gcs_to_local.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst @@ -29,7 +29,7 @@ This page shows how to download data from GCS to local filesystem. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:GCSToLocalFilesystemOperator: @@ -42,7 +42,7 @@ data from GCS to local filesystem. Below is an example of using this operator to upload a file to GCS. -.. 
exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_local.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_local.py :language: python :dedent: 0 :start-after: [START howto_operator_gcs_download_file_task] diff --git a/docs/howto/operator/google/transfer/gcs_to_sftp.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst similarity index 88% rename from docs/howto/operator/google/transfer/gcs_to_sftp.rst rename to docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst index 0a3a9e32827b8..0e4524986f688 100644 --- a/docs/howto/operator/google/transfer/gcs_to_sftp.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst @@ -32,7 +32,7 @@ It runs over the SSH protocol. It supports the full security and authentication Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:GCSToSFTPOperator: @@ -53,7 +53,7 @@ Copying a single file The following Operator copies a single file. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_copy_single_file] @@ -66,7 +66,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to S the original file from the Google Storage is deleted. The ``destination_path`` parameter defines the full path of the file on the SFTP server. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_move_single_file_destination] @@ -78,7 +78,7 @@ Copying a directory Use the ``wildcard`` in ``source_path`` parameter to copy a directory. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_copy_directory] @@ -90,7 +90,7 @@ Moving specific files Use the ``wildcard`` in ``source_path`` parameter to move the specific files. The ``destination_path`` defines the path that is prefixed to all copied files. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_move_specific_files] diff --git a/docs/howto/operator/google/transfer/gcs_to_sheets.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst similarity index 92% rename from docs/howto/operator/google/transfer/gcs_to_sheets.rst rename to docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst index a6481c53d21d9..252295b1d0333 100644 --- a/docs/howto/operator/google/transfer/gcs_to_sheets.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst @@ -32,7 +32,7 @@ common spreadsheet tasks. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. 
_howto/operator:GCSToGoogleSheets: @@ -42,7 +42,7 @@ Upload data from GCS to Google Sheets To upload data from Google Cloud Storage to Google Spreadsheet you can use the :class:`~airflow.providers.google.suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator`. -.. exampleinclude:: /../airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py +.. exampleinclude:: /../../airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py :language: python :dedent: 4 :start-after: [START upload_gcs_to_sheets] diff --git a/docs/howto/operator/google/transfer/index.rst b/docs/apache-airflow-providers-google/operators/transfer/index.rst similarity index 100% rename from docs/howto/operator/google/transfer/index.rst rename to docs/apache-airflow-providers-google/operators/transfer/index.rst diff --git a/docs/howto/operator/google/transfer/local_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst similarity index 92% rename from docs/howto/operator/google/transfer/local_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst index a9a023bc1d38e..c648f4586ca95 100644 --- a/docs/howto/operator/google/transfer/local_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst @@ -29,7 +29,7 @@ This page shows how to upload data from local filesystem to GCS. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:LocalFilesystemToGCSOperator: @@ -43,7 +43,7 @@ When you use this operator, you can optionally compress the data being uploaded. Below is an example of using this operator to upload a file to GCS. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_local_to_gcs.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_local_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_local_filesystem_to_gcs] diff --git a/docs/howto/operator/google/transfer/mysql_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst similarity index 95% rename from docs/howto/operator/google/transfer/mysql_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst index 196cafdfd00ae..cb02654c340c9 100644 --- a/docs/howto/operator/google/transfer/mysql_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst @@ -43,7 +43,7 @@ When you use this operator, you can optionally compress the data being uploaded Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_mysql_to_gcs] diff --git a/docs/howto/operator/google/transfer/presto_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst similarity index 92% rename from docs/howto/operator/google/transfer/presto_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst index 91ed06d5213cd..c462ee9abaa6c 100644 --- a/docs/howto/operator/google/transfer/presto_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst @@ -49,7 +49,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_basic] @@ -67,7 +67,7 @@ You can specify these options by the ``export_format`` parameter. If you want a CSV file to be created, your operator call might look like this: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_csv] @@ -81,7 +81,7 @@ will be dumped from the database and upload to the bucket. If you want to create a schema file, then an example operator call might look like this: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_multiple_types] @@ -102,7 +102,7 @@ maximum allowed file size for a single object. If you want to create 10 MB files, your code might look like this: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_read_data_from_gcs_many_chunks] @@ -123,7 +123,7 @@ For example, if you want to create an external table that allows you to create q read data directly from GCS, then you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator`. Using this operator looks like this: -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_create_external_table_multiple_types] diff --git a/docs/howto/operator/google/transfer/s3_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst similarity index 100% rename from docs/howto/operator/google/transfer/s3_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst diff --git a/docs/howto/operator/google/transfer/salesforce_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst similarity index 90% rename from docs/howto/operator/google/transfer/salesforce_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst index ffe093fb47374..58ae62f882794 100644 --- a/docs/howto/operator/google/transfer/salesforce_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst @@ -25,7 +25,7 @@ Salesforce To GCS Operators Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:SalesforceToGcsOperator: @@ -36,7 +36,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.salesforce_to_gcs.SalesforceToGcsOperator` to execute a Salesforce query to fetch data and load it to GCS. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py :language: python :start-after: [START howto_operator_salesforce_to_gcs] :end-before: [END howto_operator_salesforce_to_gcs] diff --git a/docs/howto/operator/google/transfer/sftp_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst similarity index 89% rename from docs/howto/operator/google/transfer/sftp_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst index c687efd25b29d..9bb48db97767d 100644 --- a/docs/howto/operator/google/transfer/sftp_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst @@ -32,7 +32,7 @@ It runs over the SSH protocol. It supports the full security and authentication Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. _howto/operator:SFTPToGCSOperator: @@ -51,7 +51,7 @@ Copying single files The following Operator copies a single file. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_copy_single_file] @@ -64,7 +64,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to G the original file from the SFTP is deleted. The ``destination_path`` parameter defines the full path of the file in the bucket. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py +.. 
exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_move_single_file_destination] @@ -76,7 +76,7 @@ Copying directory Use the ``wildcard`` in ``source_path`` parameter to copy the directory. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_copy_directory] @@ -92,7 +92,7 @@ e.g. ``tests_sftp_hook_dir/subdir/parent-1.bin`` is copied to ``specific_files/p and ``tests_sftp_hook_dir/subdir/parent-2.bin`` is copied to ``specific_files/parent-2.bin`` . ``tests_sftp_hook_dir/subdir/parent-3.txt`` is skipped. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_move_specific_files] diff --git a/docs/howto/operator/google/transfer/sheets_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst similarity index 92% rename from docs/howto/operator/google/transfer/sheets_to_gcs.rst rename to docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst index 1ff63aa670dc1..8125fa7961bce 100644 --- a/docs/howto/operator/google/transfer/sheets_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst @@ -32,7 +32,7 @@ common spreadsheet tasks. Prerequisite Tasks ^^^^^^^^^^^^^^^^^^ -.. include:: /howto/operator/google/_partials/prerequisite_tasks.rst +.. include:: /operators/_partials/prerequisite_tasks.rst .. 
_howto/operator:GoogleSheetsToGCSOperator: @@ -42,7 +42,7 @@ Upload data from Google Sheets to GCS To upload data from Google Spreadsheet to Google Cloud Storage you can use the :class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`. -.. exampleinclude:: /../airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py +.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_sheet_to_gcs] diff --git a/docs/apache-airflow-providers-google/secrets-backends/google-cloud-secret-manager-backend.rst b/docs/apache-airflow-providers-google/secrets-backends/google-cloud-secret-manager-backend.rst new file mode 100644 index 0000000000000..1435d7350ef8d --- /dev/null +++ b/docs/apache-airflow-providers-google/secrets-backends/google-cloud-secret-manager-backend.rst @@ -0,0 +1,191 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. _google_cloud_secret_manager_backend: + +Google Cloud Secret Manager Backend +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This topic describes how to configure Airflow to use `Secret Manager `__ as +a secret backend and how to manage secrets. 
+ +Before you begin +"""""""""""""""" + +Before you start, make sure you have performed the following tasks: + +1. Include sendgrid subpackage as part of your Airflow installation + + .. code-block:: bash + + pip install apache-airflow[google] + +2. `Configure Secret Manager and your local environment `__, once per project. + +Enabling the secret backend +""""""""""""""""""""""""""" + +To enable the secret backend for Google Cloud Secrets Manager to retrieve connection/variables, +specify :py:class:`~airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend` +as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. + +Here is a sample configuration if you want to use it: + +.. code-block:: ini + + [secrets] + backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + +You can also set this with environment variables. + +.. code-block:: bash + + export AIRFLOW__SECRETS__BACKEND=airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + +You can verify the correct setting of the configuration options with the ``airflow config get-value`` command. + +.. code-block:: console + + $ airflow config get-value secrets backend + airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + +Backend parameters +"""""""""""""""""" + +The next step is to configure backend parameters using the ``backend_kwargs`` options. You can pass +the following parameters: + +* ``connections_prefix``: Specifies the prefix of the secret to read to get Connections. Default: ``"airflow-connections"`` +* ``variables_prefix``: Specifies the prefix of the secret to read to get Variables. Default: ``"airflow-variables"`` +* ``gcp_key_path``: Path to Google Cloud Service Account Key file (JSON). +* ``gcp_keyfile_dict``: Dictionary of keyfile parameters. +* ``gcp_scopes``: Comma-separated string containing OAuth2 scopes. +* ``sep``: Separator used to concatenate connections_prefix and conn_id. 
Default: ``"-"`` +* ``project_id``: Project ID to read the secrets from. If not passed, the project ID from credentials will be used. + +All options should be passed as a JSON dictionary. + +For example, if you want to set parameter ``connections_prefix`` to ``"airflow-tenant-primary"`` and parameter ``variables_prefix`` to ``"variables_prefix"``, your configuration file should look like this: + +.. code-block:: ini + + [secrets] + backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend + backend_kwargs = {"connections_prefix": "airflow-tenant-primary", "variables_prefix": "airflow-tenant-primary"} + +Set-up credentials +"""""""""""""""""" + +You can configure the credentials in three ways: + +* By default, Application Default Credentials (ADC) is used obtain credentials. +* ``gcp_key_path`` option in ``backend_kwargs`` option - allows you to configure authorizations with a service account stored in local file. +* ``gcp_keyfile_dict`` option in ``backend_kwargs`` option - allows you to configure authorizations with a service account stored in Airflow configuration. + +.. note:: + + For more information about the Application Default Credentials (ADC), see: + + * `google.auth.default `__ + * `Setting Up Authentication for Server to Server Production Applications `__ + +Managing secrets +"""""""""""""""" + +If you want to configure a connection, you need to save it as a :ref:`connection URI representation `. +Variables should be saved as plain text. + +In order to manage secrets, you can use the ``gcloud`` tool or other supported tools. For more information, take a look at: +`Managing secrets `__ in Google Cloud Documentation. 
+ +The name of the secret must fit the following formats: + + * for connection: ``[variable_prefix][sep][connection_name]`` + * for variable: ``[connections_prefix][sep][variable_name]`` + * for Airflow config: ``[config_prefix][sep][config_name]`` + +where: + + * ``connections_prefix`` - fixed value defined in the ``connections_prefix`` parameter in backend configuration. Default: ``airflow-connections``. + * ``variable_prefix`` - fixed value defined in the ``variable_prefix`` parameter in backend configuration. Default: ``airflow-variables``. + * ``config_prefix`` - fixed value defined in the ``config_prefix`` parameter in backend configuration. Default: ``airflow-config``. + * ``sep`` - fixed value defined in the ``sep`` parameter in backend configuration. Default: ``-``. + +The Cloud Secrets Manager secret name should follow the pattern ``^[a-zA-Z0-9-_]*$``. + +If you have the default backend configuration and you want to create a connection with ``conn_id`` +equals ``first-connection``, you should create secret named ``airflow-connections-first-connection``. +You can do it with the gcloud tools as in the example below. + +.. code-block:: bash + + $ echo "mysql://example.org" | gcloud beta secrets create \ + airflow-connections-first-connection \ + --data-file=- \ + --replication-policy=automatic + Created version [1] of the secret [airflow-variables-first-connection]. + +If you have the default backend configuration and you want to create a variable named ``first-variable``, +you should create a secret named ``airflow-variables-first-variable``. You can do it with the gcloud +command as in the example below. + +.. code-block:: bash + + $ echo "secret_content" | gcloud beta secrets create \ + airflow-variables-first-variable \ + --data-file=-\ + --replication-policy=automatic + Created version [1] of the secret [airflow-variables-first-variable]. 
+ +Checking configuration +====================== + +You can use the ``airflow connections get`` command to check if the connection is correctly read from the backend secret: + +.. code-block:: console + + $ airflow connections get first-connection + Id: null + Conn Id: first-connection + Conn Type: mysql + Host: example.org + Schema: '' + Login: null + Password: null + Port: null + Is Encrypted: null + Is Extra Encrypted: null + Extra: {} + URI: mysql://example.org + +To check the variables is correctly read from the backend secret, you can use ``airflow variables get``: + +.. code-block:: console + + $ airflow variables get first-variable + secret_content + +Clean up +======== + +To avoid incurring charges to your Google Cloud account for the resources used in this guide, +delete secrets by running ``gcloud beta secrets delete``: + +.. code-block:: bash + + gcloud beta secrets delete airflow-connections-first-connection + gcloud beta secrets delete airflow-variables-first-variable diff --git a/docs/apache-airflow-providers-grpc/index.rst b/docs/apache-airflow-providers-grpc/index.rst new file mode 100644 index 0000000000000..e6400768a91aa --- /dev/null +++ b/docs/apache-airflow-providers-grpc/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. 
See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-grpc`` +================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/grpc/index> diff --git a/docs/apache-airflow-providers-hashicorp/index.rst b/docs/apache-airflow-providers-hashicorp/index.rst new file mode 100644 index 0000000000000..532d0813f3a64 --- /dev/null +++ b/docs/apache-airflow-providers-hashicorp/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-hashicorp`` +====================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/hashicorp/index> diff --git a/docs/apache-airflow-providers-http/index.rst b/docs/apache-airflow-providers-http/index.rst new file mode 100644 index 0000000000000..fa5dc916105fc --- /dev/null +++ b/docs/apache-airflow-providers-http/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-http`` +================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/http/index> diff --git a/docs/apache-airflow-providers-imap/index.rst b/docs/apache-airflow-providers-imap/index.rst new file mode 100644 index 0000000000000..3521e9556fc1f --- /dev/null +++ b/docs/apache-airflow-providers-imap/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ +``apache-airflow-providers-imap`` +================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/imap/index> diff --git a/docs/apache-airflow-providers-jdbc/index.rst b/docs/apache-airflow-providers-jdbc/index.rst new file mode 100644 index 0000000000000..7bf75d86488c6 --- /dev/null +++ b/docs/apache-airflow-providers-jdbc/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-jdbc`` +================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/jdbc/index> diff --git a/docs/apache-airflow-providers-jenkins/index.rst b/docs/apache-airflow-providers-jenkins/index.rst new file mode 100644 index 0000000000000..4013f07244ace --- /dev/null +++ b/docs/apache-airflow-providers-jenkins/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-jenkins`` +==================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/jenkins/index> diff --git a/docs/apache-airflow-providers-jira/index.rst b/docs/apache-airflow-providers-jira/index.rst new file mode 100644 index 0000000000000..9c8af0b848aee --- /dev/null +++ b/docs/apache-airflow-providers-jira/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-jira`` +================================= + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/jira/index> diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst new file mode 100644 index 0000000000000..f7c7c3c01bd68 --- /dev/null +++ b/docs/apache-airflow-providers-microsoft-azure/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-microsoft-azure`` +============================================ + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/microsoft/azure/index> diff --git a/docs/apache-airflow-providers-microsoft-mssql/index.rst b/docs/apache-airflow-providers-microsoft-mssql/index.rst new file mode 100644 index 0000000000000..32ae107e7dbf0 --- /dev/null +++ b/docs/apache-airflow-providers-microsoft-mssql/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-microsoft-mssql`` +============================================ + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/microsoft/mssql/index> diff --git a/docs/apache-airflow-providers-microsoft-winrm/index.rst b/docs/apache-airflow-providers-microsoft-winrm/index.rst new file mode 100644 index 0000000000000..f9d02d9b9a5cd --- /dev/null +++ b/docs/apache-airflow-providers-microsoft-winrm/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-microsoft-winrm`` +============================================ + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/microsoft/winrm/index> diff --git a/docs/apache-airflow-providers-mongo/index.rst b/docs/apache-airflow-providers-mongo/index.rst new file mode 100644 index 0000000000000..d034f4ee8397a --- /dev/null +++ b/docs/apache-airflow-providers-mongo/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-mongo`` +================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/mongo/index> diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst new file mode 100644 index 0000000000000..a75b096d35f0f --- /dev/null +++ b/docs/apache-airflow-providers-mysql/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-mysql`` +================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/mysql/index> diff --git a/docs/apache-airflow-providers-odbc/index.rst b/docs/apache-airflow-providers-odbc/index.rst new file mode 100644 index 0000000000000..ef32f64838ceb --- /dev/null +++ b/docs/apache-airflow-providers-odbc/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-odbc`` +================================= + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/odbc/index> diff --git a/docs/apache-airflow-providers-openfaas/index.rst b/docs/apache-airflow-providers-openfaas/index.rst new file mode 100644 index 0000000000000..39d8113293a4d --- /dev/null +++ b/docs/apache-airflow-providers-openfaas/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-openfaas`` +===================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/openfaas/index> diff --git a/docs/apache-airflow-providers-opsgenie/index.rst b/docs/apache-airflow-providers-opsgenie/index.rst new file mode 100644 index 0000000000000..b036b9152f9df --- /dev/null +++ b/docs/apache-airflow-providers-opsgenie/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-opsgenie`` +===================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/opsgenie/index> diff --git a/docs/apache-airflow-providers-oracle/index.rst b/docs/apache-airflow-providers-oracle/index.rst new file mode 100644 index 0000000000000..aa8a7082f5d0e --- /dev/null +++ b/docs/apache-airflow-providers-oracle/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-oracle`` +=================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/oracle/index> diff --git a/docs/apache-airflow-providers-pagerduty/index.rst b/docs/apache-airflow-providers-pagerduty/index.rst new file mode 100644 index 0000000000000..65f7e4744ab9e --- /dev/null +++ b/docs/apache-airflow-providers-pagerduty/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-pagerduty`` +====================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/pagerduty/index> diff --git a/docs/apache-airflow-providers-papermill/index.rst b/docs/apache-airflow-providers-papermill/index.rst new file mode 100644 index 0000000000000..70a1d7a09beda --- /dev/null +++ b/docs/apache-airflow-providers-papermill/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-papermill`` +====================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/papermill/index> diff --git a/docs/apache-airflow-providers-plexus/index.rst b/docs/apache-airflow-providers-plexus/index.rst new file mode 100644 index 0000000000000..bf30827d89602 --- /dev/null +++ b/docs/apache-airflow-providers-plexus/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-plexus`` +=================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/plexus/index> diff --git a/docs/apache-airflow-providers-postgres/index.rst b/docs/apache-airflow-providers-postgres/index.rst new file mode 100644 index 0000000000000..b1ea986a87657 --- /dev/null +++ b/docs/apache-airflow-providers-postgres/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-postgres`` +===================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/postgres/index> diff --git a/docs/apache-airflow-providers-presto/index.rst b/docs/apache-airflow-providers-presto/index.rst new file mode 100644 index 0000000000000..8b274b32720d4 --- /dev/null +++ b/docs/apache-airflow-providers-presto/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-presto`` +=================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/presto/index> diff --git a/docs/apache-airflow-providers-qubole/index.rst b/docs/apache-airflow-providers-qubole/index.rst new file mode 100644 index 0000000000000..af556edae5e5b --- /dev/null +++ b/docs/apache-airflow-providers-qubole/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-qubole`` +=================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/qubole/index> diff --git a/docs/apache-airflow-providers-redis/index.rst b/docs/apache-airflow-providers-redis/index.rst new file mode 100644 index 0000000000000..154b1f0b21c6c --- /dev/null +++ b/docs/apache-airflow-providers-redis/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-redis`` +================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/redis/index> diff --git a/docs/apache-airflow-providers-salesforce/index.rst b/docs/apache-airflow-providers-salesforce/index.rst new file mode 100644 index 0000000000000..6e9a8b9498a66 --- /dev/null +++ b/docs/apache-airflow-providers-salesforce/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-salesforce`` +======================================= + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/salesforce/index> diff --git a/docs/apache-airflow-providers-samba/index.rst b/docs/apache-airflow-providers-samba/index.rst new file mode 100644 index 0000000000000..ac30f21133cfb --- /dev/null +++ b/docs/apache-airflow-providers-samba/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-samba`` +================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/samba/index> diff --git a/docs/apache-airflow-providers-segment/index.rst b/docs/apache-airflow-providers-segment/index.rst new file mode 100644 index 0000000000000..0d0d313a66a4f --- /dev/null +++ b/docs/apache-airflow-providers-segment/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-segment`` +==================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/segment/index> diff --git a/docs/apache-airflow-providers-sendgrid/index.rst b/docs/apache-airflow-providers-sendgrid/index.rst new file mode 100644 index 0000000000000..63d2340df5d07 --- /dev/null +++ b/docs/apache-airflow-providers-sendgrid/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-sendgrid`` +===================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/sendgrid/index> diff --git a/docs/apache-airflow-providers-sftp/index.rst b/docs/apache-airflow-providers-sftp/index.rst new file mode 100644 index 0000000000000..2c12187a3bbb6 --- /dev/null +++ b/docs/apache-airflow-providers-sftp/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-sftp`` +================================= + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/sftp/index> diff --git a/docs/apache-airflow-providers-singularity/index.rst b/docs/apache-airflow-providers-singularity/index.rst new file mode 100644 index 0000000000000..90c94bb7af549 --- /dev/null +++ b/docs/apache-airflow-providers-singularity/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-singularity`` +======================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/singularity/index> diff --git a/docs/apache-airflow-providers-slack/index.rst b/docs/apache-airflow-providers-slack/index.rst new file mode 100644 index 0000000000000..329098cae3c62 --- /dev/null +++ b/docs/apache-airflow-providers-slack/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-slack`` +================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/slack/index> diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst new file mode 100644 index 0000000000000..2eb244b06af88 --- /dev/null +++ b/docs/apache-airflow-providers-snowflake/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-snowflake`` +====================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/snowflake/index> diff --git a/docs/apache-airflow-providers-sqlite/index.rst b/docs/apache-airflow-providers-sqlite/index.rst new file mode 100644 index 0000000000000..ae0e8707f1569 --- /dev/null +++ b/docs/apache-airflow-providers-sqlite/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-sqlite`` +=================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/sqlite/index> diff --git a/docs/apache-airflow-providers-ssh/index.rst b/docs/apache-airflow-providers-ssh/index.rst new file mode 100644 index 0000000000000..c3a7f59614a14 --- /dev/null +++ b/docs/apache-airflow-providers-ssh/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-ssh`` +================================ + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/ssh/index> diff --git a/docs/apache-airflow-providers-vertica/index.rst b/docs/apache-airflow-providers-vertica/index.rst new file mode 100644 index 0000000000000..ccb6e11e6d466 --- /dev/null +++ b/docs/apache-airflow-providers-vertica/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-vertica`` +==================================== + +Content +------- + +.. 
toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/vertica/index> diff --git a/docs/apache-airflow-providers-yandex/index.rst b/docs/apache-airflow-providers-yandex/index.rst new file mode 100644 index 0000000000000..86b361804fdc5 --- /dev/null +++ b/docs/apache-airflow-providers-yandex/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-yandex`` +=================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/yandex/index> diff --git a/docs/apache-airflow-providers-zendesk/index.rst b/docs/apache-airflow-providers-zendesk/index.rst new file mode 100644 index 0000000000000..d8ec3645a08a4 --- /dev/null +++ b/docs/apache-airflow-providers-zendesk/index.rst @@ -0,0 +1,29 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-zendesk`` +==================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/zendesk/index> diff --git a/docs/autoapi_templates/index.rst b/docs/autoapi_templates/index.rst index df581b7d95ad4..c0550558cadd9 100644 --- a/docs/autoapi_templates/index.rst +++ b/docs/autoapi_templates/index.rst @@ -66,157 +66,6 @@ All operators are in the following packages: airflow/sensors/index - airflow/providers/amazon/aws/operators/index - - airflow/providers/amazon/aws/sensors/index - - airflow/providers/amazon/aws/transfers/index - - airflow/providers/apache/cassandra/sensors/index - - airflow/providers/apache/druid/operators/index - - airflow/providers/apache/druid/transfers/index - - airflow/providers/apache/hdfs/sensors/index - - airflow/providers/apache/hive/operators/index - - airflow/providers/apache/hive/sensors/index - - airflow/providers/apache/hive/transfers/index - - airflow/providers/apache/kylin/operators/index - - airflow/providers/apache/livy/operators/index - - airflow/providers/apache/livy/sensors/index - - airflow/providers/apache/pig/operators/index - - airflow/providers/apache/spark/operators/index - - airflow/providers/apache/sqoop/operators/index - - airflow/providers/celery/sensors/index - - airflow/providers/cncf/kubernetes/operators/index - - airflow/providers/cncf/kubernetes/sensors/index - - airflow/providers/databricks/operators/index - - airflow/providers/datadog/sensors/index - - airflow/providers/dingding/operators/index - - 
airflow/providers/discord/operators/index - - airflow/providers/docker/operators/index - - airflow/providers/exasol/operators/index - - airflow/providers/ftp/sensors/index - - airflow/providers/google/ads/operators/index - - airflow/providers/google/ads/transfers/index - - airflow/providers/google/cloud/operators/index - - airflow/providers/google/cloud/sensors/index - - airflow/providers/google/cloud/transfers/index - - airflow/providers/google/firebase/operators/index - - airflow/providers/google/marketing_platform/operators/index - - airflow/providers/google/marketing_platform/sensors/index - - airflow/providers/google/suite/operators/index - - airflow/providers/google/suite/transfers/index - - airflow/providers/grpc/operators/index - - airflow/providers/hashicorp/hooks/index - - airflow/providers/http/operators/index - - airflow/providers/http/sensors/index - - airflow/providers/imap/sensors/index - - airflow/providers/jdbc/operators/index - - airflow/providers/jenkins/operators/index - - airflow/providers/jira/operators/index - - airflow/providers/jira/sensors/index - - airflow/providers/microsoft/azure/operators/index - - airflow/providers/microsoft/azure/sensors/index - - airflow/providers/microsoft/azure/transfers/index - - airflow/providers/microsoft/mssql/operators/index - - airflow/providers/microsoft/winrm/operators/index - - airflow/providers/mongo/sensors/index - - airflow/providers/mysql/operators/index - - airflow/providers/mysql/transfers/index - - airflow/providers/opsgenie/operators/index - - airflow/providers/oracle/operators/index - - airflow/providers/oracle/transfers/index - - airflow/providers/papermill/operators/index - - airflow/providers/plexus/operators/index - - airflow/providers/postgres/operators/index - - airflow/providers/qubole/operators/index - - airflow/providers/qubole/sensors/index - - airflow/providers/redis/operators/index - - airflow/providers/redis/sensors/index - - airflow/providers/salesforce/operators/index - - 
airflow/providers/salesforce/sensors/index - - airflow/providers/segment/operators/index - - airflow/providers/sftp/operators/index - - airflow/providers/sftp/sensors/index - - airflow/providers/singularity/operators/index - - airflow/providers/slack/operators/index - - airflow/providers/snowflake/operators/index - - airflow/providers/snowflake/transfers/index - - airflow/providers/sqlite/operators/index - - airflow/providers/ssh/operators/index - - airflow/providers/vertica/operators/index - - airflow/providers/yandex/operators/index .. _pythonapi:hooks: @@ -237,128 +86,6 @@ All hooks are in the following packages: airflow/hooks/index - airflow/providers/amazon/aws/hooks/index - - airflow/providers/apache/cassandra/hooks/index - - airflow/providers/apache/druid/hooks/index - - airflow/providers/apache/hdfs/hooks/index - - airflow/providers/apache/hive/hooks/index - - airflow/providers/apache/kylin/hooks/index - - airflow/providers/apache/livy/hooks/index - - airflow/providers/apache/pig/hooks/index - - airflow/providers/apache/pinot/hooks/index - - airflow/providers/apache/spark/hooks/index - - airflow/providers/apache/sqoop/hooks/index - - airflow/providers/cloudant/hooks/index - - airflow/providers/cncf/kubernetes/hooks/index - - airflow/providers/databricks/hooks/index - - airflow/providers/datadog/hooks/index - - airflow/providers/discord/hooks/index - - airflow/providers/dingding/hooks/index - - airflow/providers/docker/hooks/index - - airflow/providers/elasticsearch/hooks/index - - airflow/providers/exasol/hooks/index - - airflow/providers/facebook/ads/hooks/index - - airflow/providers/ftp/hooks/index - - airflow/providers/google/ads/hooks/index - - airflow/providers/google/cloud/hooks/index - - airflow/providers/google/common/hooks/index - - airflow/providers/google/firebase/hooks/index - - airflow/providers/google/marketing_platform/hooks/index - - airflow/providers/google/suite/hooks/index - - airflow/providers/grpc/hooks/index - - 
airflow/providers/hashicorp/hooks/index - - airflow/providers/http/hooks/index - - airflow/providers/imap/hooks/index - - airflow/providers/jdbc/hooks/index - - airflow/providers/jenkins/hooks/index - - airflow/providers/jira/hooks/index - - airflow/providers/microsoft/azure/hooks/index - - airflow/providers/microsoft/mssql/hooks/index - - airflow/providers/microsoft/winrm/hooks/index - - airflow/providers/mongo/hooks/index - - airflow/providers/mysql/hooks/index - - airflow/providers/odbc/hooks/index - - airflow/providers/openfaas/hooks/index - - airflow/providers/opsgenie/hooks/index - - airflow/providers/oracle/hooks/index - - airflow/providers/pagerduty/hooks/index - - airflow/providers/plexus/hooks/index - - airflow/providers/postgres/hooks/index - - airflow/providers/presto/hooks/index - - airflow/providers/qubole/hooks/index - - airflow/providers/redis/hooks/index - - airflow/providers/salesforce/hooks/index - - airflow/providers/samba/hooks/index - - airflow/providers/segment/hooks/index - - airflow/providers/sftp/hooks/index - - airflow/providers/slack/hooks/index - - airflow/providers/snowflake/hooks/index - - airflow/providers/sqlite/hooks/index - - airflow/providers/ssh/hooks/index - - airflow/providers/vertica/hooks/index - - airflow/providers/zendesk/hooks/index - - airflow/providers/yandex/hooks/index - Executors --------- Executors are the mechanism by which task instances get run. All executors are @@ -410,95 +137,3 @@ All secrets backends derive from :class:`~airflow.secrets.BaseSecretsBackend`. :maxdepth: 1 airflow/secrets/index - - airflow/providers/amazon/aws/secrets/index - airflow/providers/hashicorp/secrets/index - airflow/providers/google/cloud/secrets/index - airflow/providers/microsoft/azure/secrets/index - -Task Log Handlers ------------------ -Task log handlers are python log handlers that handles and reads task instance logs. -All task log handlers are derived from :class:`~airflow.utils.log.file_task_handler.FileTaskHandler`. - -.. 
toctree:: - :includehidden: - :glob: - :maxdepth: 1 - - - airflow/providers/amazon/aws/log/index - airflow/providers/elasticsearch/log/index - airflow/providers/google/cloud/log/index - airflow/providers/microsoft/azure/log/index - -Providers ---------- - -Third party integrations are in a separate package :mod:`airflow.providers`. - -.. toctree:: - :includehidden: - :glob: - :maxdepth: 1 - - airflow/providers/amazon/index - airflow/providers/apache/cassandra/index - airflow/providers/apache/druid/index - airflow/providers/apache/hdfs/index - airflow/providers/apache/hive/index - airflow/providers/apache/kylin/index - airflow/providers/apache/livy/index - airflow/providers/apache/pig/index - airflow/providers/apache/pinot/index - airflow/providers/apache/spark/index - airflow/providers/apache/sqoop/index - airflow/providers/celery/index - airflow/providers/cloudant/index - airflow/providers/cncf/kubernetes/index - airflow/providers/databricks/index - airflow/providers/datadog/index - airflow/providers/dingding/index - airflow/providers/discord/index - airflow/providers/docker/index - airflow/providers/elasticsearch/index - airflow/providers/exasol/index - airflow/providers/facebook/index - airflow/providers/ftp/index - airflow/providers/google/index - airflow/providers/grpc/index - airflow/providers/hashicorp/index - airflow/providers/http/index - airflow/providers/imap/index - airflow/providers/jdbc/index - airflow/providers/jenkins/index - airflow/providers/jira/index - airflow/providers/microsoft/azure/index - airflow/providers/microsoft/mssql/index - airflow/providers/microsoft/winrm/index - airflow/providers/mongo/index - airflow/providers/mysql/index - airflow/providers/odbc/index - airflow/providers/openfaas/index - airflow/providers/opsgenie/index - airflow/providers/oracle/index - airflow/providers/pagerduty/index - airflow/providers/papermill/index - airflow/providers/plexus/index - airflow/providers/postgres/index - airflow/providers/presto/index - 
airflow/providers/qubole/index - airflow/providers/redis/index - airflow/providers/salesforce/index - airflow/providers/samba/index - airflow/providers/segment/index - airflow/providers/sendgrid/index - airflow/providers/sftp/index - airflow/providers/singularity/index - airflow/providers/slack/index - airflow/providers/snowflake/index - airflow/providers/sqlite/index - airflow/providers/ssh/index - airflow/providers/vertica/index - airflow/providers/yandex/index - airflow/providers/zendesk/index diff --git a/docs/build_docs.py b/docs/build_docs.py index 74063f1a3dcbd..63325c7df96b2 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -16,19 +16,32 @@ # specific language governing permissions and limitations # under the License. import argparse -import ast +import fnmatch import os import re import shlex import shutil import sys -from contextlib import suppress -from functools import total_ordering +from collections import defaultdict from glob import glob -from itertools import chain from subprocess import run -from tempfile import NamedTemporaryFile -from typing import Iterable, List, NamedTuple, Optional, Set +from tempfile import NamedTemporaryFile, TemporaryDirectory +from typing import Dict, List, Optional, Tuple + +from tabulate import tabulate + +from docs.exts.docs_build import dev_index_generator, lint_checks # pylint: disable=no-name-in-module +from docs.exts.docs_build.errors import ( # pylint: disable=no-name-in-module + DocBuildError, + display_errors_summary, + parse_sphinx_warnings, +) +from docs.exts.docs_build.spelling_checks import ( # pylint: disable=no-name-in-module + SpellingError, + display_spelling_error_summary, + parse_spelling_warnings, +) +from docs.exts.provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module if __name__ != "__main__": raise Exception( @@ -36,638 +49,273 @@ "To run this script, run the ./build_docs.py command" ) - -@total_ordering -class DocBuildError(NamedTuple): - """Errors found in 
docs build.""" - - file_path: Optional[str] - line_no: Optional[int] - message: str - - def __eq__(self, other): - left = (self.file_path, self.line_no, self.message) - right = (other.file_path, other.line_no, other.message) - return left == right - - def __ne__(self, other): - return not self == other - - def __lt__(self, other): - file_path_a = self.file_path or '' - file_path_b = other.file_path or '' - line_no_a = self.line_no or 0 - line_no_b = other.line_no or 0 - return (file_path_a, line_no_a, self.message) < (file_path_b, line_no_b, other.message) - - -@total_ordering -class SpellingError(NamedTuple): - """Spelling errors found when building docs.""" - - file_path: Optional[str] - line_no: Optional[int] - spelling: Optional[str] - suggestion: Optional[str] - context_line: Optional[str] - message: str - - def __eq__(self, other): - left = (self.file_path, self.line_no, self.spelling, self.context_line, self.message) - right = (other.file_path, other.line_no, other.spelling, other.context_line, other.message) - return left == right - - def __ne__(self, other): - return not self == other - - def __lt__(self, other): - file_path_a = self.file_path or '' - file_path_b = other.file_path or '' - line_no_a = self.line_no or 0 - line_no_b = other.line_no or 0 - context_line_a = self.context_line or '' - context_line_b = other.context_line or '' - return (file_path_a, line_no_a, context_line_a, self.spelling, self.message) < ( - file_path_b, - line_no_b, - context_line_b, - other.spelling, - other.message, - ) - - -build_errors: List[DocBuildError] = [] -spelling_errors: List[SpellingError] = [] - ROOT_PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir)) ROOT_PACKAGE_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow") DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs") -PROVIDER_INIT_FILE = os.path.join(ROOT_PACKAGE_DIR, "providers", "__init__.py") - -_API_DIR = os.path.join(DOCS_DIR, "_api") -_BUILD_DIR = 
os.path.join(DOCS_DIR, "_build") - - -def clean_files() -> None: - """Cleanup all artifacts generated by previous builds.""" - shutil.rmtree(_API_DIR, ignore_errors=True) - shutil.rmtree(_BUILD_DIR, ignore_errors=True) - os.makedirs(_API_DIR, exist_ok=True) - os.makedirs(_BUILD_DIR, exist_ok=True) - print(f"Recreated content of the {shlex.quote(_BUILD_DIR)} and {shlex.quote(_API_DIR)} folders") +ALL_PROVIDER_YAMLS = load_package_data() +CHANNEL_INVITATION = """\ +If you need help, write to #documentation channel on Airflow's Slack. +Channel link: https://apache-airflow.slack.com/archives/CJ1LVREHX +Invitation link: https://s.apache.org/airflow-slack\ +""" -def display_errors_summary() -> None: - """Displays summary of errors""" - for warning_no, error in enumerate(sorted(build_errors), 1): - print("=" * 20, f"Error {warning_no:3}", "=" * 20) - print(error.message) - print() - if error.file_path and error.line_no: - print(f"File path: {error.file_path} ({error.line_no})") - print() - print(prepare_code_snippet(error.file_path, error.line_no)) - elif error.file_path: - print(f"File path: {error.file_path}") - print("=" * 50) +class AirflowDocsBuilder: + """Documentation builder for Airflow.""" + def __init__(self, package_name: str): + self.package_name = package_name -def display_spelling_error_summary() -> None: - """Displays summary of Spelling errors""" - for warning_no, error in enumerate(sorted(spelling_errors), 1): - print("=" * 20, f"Error {warning_no:3}", "=" * 20) - print(error.message) - print() - if error.file_path: - print(f"File path: {error.file_path}") - if error.spelling: - print(f"Incorrect Spelling: '{error.spelling}'") - if error.suggestion: - print(f"Suggested Spelling: '{error.suggestion}'") - if error.context_line: - print(f"Line with Error: '{error.context_line}'") - if error.line_no: - print(f"Line Number: {error.line_no}") - print(prepare_code_snippet(os.path.join(DOCS_DIR, error.file_path), error.line_no)) - - print("=" * 50) - print() - 
msg = """ -If the spelling is correct, add the spelling to docs/spelling_wordlist.txt -or use the spelling directive. -Check https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html#private-dictionaries -for more details. - """ - print(msg) - print() - - -def find_existing_guide_operator_names() -> Set[str]: - """ - Find names of existing operators. - :return names of existing operators. - """ - operator_names = set() - - paths = glob(f"${DOCS_DIR}/howto/operator/**/*.rst", recursive=True) - for path in paths: - with open(path) as f: - operator_names |= set(re.findall(".. _howto/operator:(.+?):", f.read())) - - return operator_names - - -def extract_ast_class_def_by_name(ast_tree, class_name): - """ - Extracts class definition by name - :param ast_tree: AST tree - :param class_name: name of the class. - :return: class node found - """ - - class ClassVisitor(ast.NodeVisitor): - """Visitor.""" - - def __init__(self): - self.found_class_node = None - - def visit_ClassDef(self, node): # pylint: disable=invalid-name - """ - Visit class definition. - :param node: node. - :return: - """ - if node.name == class_name: - self.found_class_node = node - - visitor = ClassVisitor() - visitor.visit(ast_tree) - - return visitor.found_class_node - - -def check_guide_links_in_operator_descriptions() -> None: - """Check if there are links to guides in operator's descriptions.""" - - def generate_build_error(path, line_no, operator_name): - return DocBuildError( - file_path=path, - line_no=line_no, - message=( - f"Link to the guide is missing in operator's description: {operator_name}.\n" - f"Please add link to the guide to the description in the following form:\n" - f"\n" - f".. 
seealso::\n" - f" For more information on how to use this operator, take a look at the guide:\n" - f" :ref:`howto/operator:{operator_name}`\n" - ), - ) - - # Extract operators for which there are existing .rst guides - operator_names = find_existing_guide_operator_names() - - # Extract all potential python modules that can contain operators - python_module_paths = chain( - glob(f"{ROOT_PACKAGE_DIR}/operators/*.py"), - glob(f"{ROOT_PACKAGE_DIR}/sensors/*.py"), - glob(f"{ROOT_PACKAGE_DIR}/providers/**/operators/*.py", recursive=True), - glob(f"{ROOT_PACKAGE_DIR}/providers/**/sensors/*.py", recursive=True), - glob(f"{ROOT_PACKAGE_DIR}/providers/**/transfers/*.py", recursive=True), - ) + @property + def _doctree_dir(self) -> str: + return f"{DOCS_DIR}/_doctrees/docs/{self.package_name}" - for py_module_path in python_module_paths: - with open(py_module_path) as f: - py_content = f.read() - - if "This module is deprecated" in py_content: - continue - - for existing_operator in operator_names: - if f"class {existing_operator}" not in py_content: - continue - # This is a potential file with necessary class definition. - # To make sure it's a real Python class definition, we build AST tree - ast_tree = ast.parse(py_content) - class_def = extract_ast_class_def_by_name(ast_tree, existing_operator) - - if class_def is None: - continue - - docstring = ast.get_docstring(class_def) - if "This class is deprecated." in docstring: - continue - - if f":ref:`howto/operator:{existing_operator}`" in ast.get_docstring(class_def): - continue - - build_errors.append(generate_build_error(py_module_path, class_def.lineno, existing_operator)) - - -def assert_file_not_contains(file_path: str, pattern: str, message: str) -> None: - """ - Asserts that file does not contain the pattern. Return message error if it does. 
- :param file_path: file - :param pattern: pattern - :param message: message to return - """ - with open(file_path, "rb", 0) as doc_file: - pattern_compiled = re.compile(pattern) - - for num, line in enumerate(doc_file, 1): - line_decode = line.decode() - if re.search(pattern_compiled, line_decode): - build_errors.append(DocBuildError(file_path=file_path, line_no=num, message=message)) - - -def filter_file_list_by_pattern(file_paths: Iterable[str], pattern: str) -> List[str]: - """ - Filters file list to those tha content matches the pattern - :param file_paths: file paths to check - :param pattern: pattern to match - :return: list of files matching the pattern - """ - output_paths = [] - pattern_compiled = re.compile(pattern) - for file_path in file_paths: - with open(file_path, "rb", 0) as text_file: - text_file_content = text_file.read().decode() - if re.findall(pattern_compiled, text_file_content): - output_paths.append(file_path) - return output_paths - - -def find_modules(deprecated_only: bool = False) -> Set[str]: - """ - Finds all modules. - :param deprecated_only: whether only deprecated modules should be found. 
- :return: set of all modules found - """ - file_paths = glob(f"{ROOT_PACKAGE_DIR}/**/*.py", recursive=True) - # Exclude __init__.py - file_paths = [f for f in file_paths if not f.endswith("__init__.py")] - if deprecated_only: - file_paths = filter_file_list_by_pattern(file_paths, r"This module is deprecated.") - # Make path relative - file_paths = [os.path.relpath(f, ROOT_PROJECT_DIR) for f in file_paths] - # Convert filename to module - modules_names = {file_path.rpartition(".")[0].replace("/", ".") for file_path in file_paths} - return modules_names - - -def check_exampleinclude_for_example_dags(): - """Checks all exampleincludes for example dags.""" - all_docs_files = glob(f"${DOCS_DIR}/**/*rst", recursive=True) - - for doc_file in all_docs_files: - assert_file_not_contains( - file_path=doc_file, - pattern=r"literalinclude::.+example_dags", - message=( - "literalinclude directive is prohibited for example DAGs. \n" - "You should use the exampleinclude directive to include example DAGs." - ), - ) - - -def check_enforce_code_block(): - """Checks all code:: blocks.""" - all_docs_files = glob(f"${DOCS_DIR}/**/*rst", recursive=True) - - for doc_file in all_docs_files: - assert_file_not_contains( - file_path=doc_file, - pattern=r"^.. code::", - message=( - "We recommend using the code-block directive instead of the code directive. " - "The code-block directive is more feature-full." 
- ), - ) - - -MISSING_GOOGLE_DOC_GUIDES = { - "ads_to_gcs", - 'adls_to_gcs', - 'bigquery_to_bigquery', - 'bigquery_to_gcs', - 'bigquery_to_mysql', - 'cassandra_to_gcs', - 'dataflow', - 'gcs_to_bigquery', - 'mssql_to_gcs', - 'postgres_to_gcs', - 'sql_to_gcs', - 'tasks', -} - - -def check_google_guides(): - """Checks Google guides.""" - doc_files = glob(f"{DOCS_DIR}/howto/operator/google/**/*.rst", recursive=True) - doc_names = {f.split("/")[-1].rsplit(".")[0] for f in doc_files} - - operators_files = chain( - *[ - glob(f"{ROOT_PACKAGE_DIR}/providers/google/*/{resource_type}/*.py") - for resource_type in ["operators", "sensors", "transfers"] - ] - ) - operators_files = (f for f in operators_files if not f.endswith("__init__.py")) - operator_names = {f.split("/")[-1].rsplit(".")[0] for f in operators_files} - - # Detect missing docs: - missing_guide = operator_names - doc_names - missing_guide -= MISSING_GOOGLE_DOC_GUIDES - if missing_guide: - missing_guide_text = " * " + "\n * ".join(missing_guide) - message = ( - "You've added a new operators, but it looks like you haven't added the guide.\n" - f"{missing_guide_text}" - "\n" - "Could you add it?\n" - ) - build_errors.append(DocBuildError(file_path=None, line_no=None, message=message)) - - # Keep update missing missing guide list - new_guides = set(doc_names).intersection(set(MISSING_GOOGLE_DOC_GUIDES)) - if new_guides: - new_guides_text = " * " + "\n * ".join(new_guides) - message = ( - "You've added a guide currently listed as missing:\n" - f"{new_guides_text}" - "\n" - "Thank you very much.\n" - "Can you remove it from the list of missing guide, please?" - ) - build_errors.append(DocBuildError(file_path=__file__, line_no=None, message=message)) - - -def prepare_code_snippet(file_path: str, line_no: int, context_lines_count: int = 5) -> str: - """ - Prepares code snippet. - :param file_path: file path - :param line_no: line number - :param context_lines_count: number of lines of context. 
- :return: - """ - - def guess_lexer_for_filename(filename): - from pygments.lexers import get_lexer_for_filename - from pygments.util import ClassNotFound - - try: - lexer = get_lexer_for_filename(filename) - except ClassNotFound: - from pygments.lexers.special import TextLexer - - lexer = TextLexer() - return lexer - - with open(file_path) as text_file: - # Highlight code - code = text_file.read() - with suppress(ImportError): - import pygments - from pygments.formatters.terminal import TerminalFormatter - - code = pygments.highlight( - code=code, formatter=TerminalFormatter(), lexer=guess_lexer_for_filename(file_path) - ) + @property + def _out_dir(self) -> str: + return f"{DOCS_DIR}/_build/docs/{self.package_name}/latest" - code_lines = code.split("\n") - # Prepend line number - code_lines = [f"{line_no:4} | {line}" for line_no, line in enumerate(code_lines, 1)] - # # Cut out the snippet - start_line_no = max(0, line_no - context_lines_count) - end_line_no = line_no + context_lines_count - code_lines = code_lines[start_line_no:end_line_no] - # Join lines - code = "\n".join(code_lines) - return code - - -def parse_sphinx_warnings(warning_text: str) -> List[DocBuildError]: - """ - Parses warnings from Sphinx. - :param warning_text: warning to parse - :return: list of DocBuildErrors. - """ - sphinx_build_errors = [] - for sphinx_warning in warning_text.split("\n"): - if not sphinx_warning: - continue - warning_parts = sphinx_warning.split(":", 2) - if len(warning_parts) == 3: - try: - sphinx_build_errors.append( - DocBuildError( - file_path=warning_parts[0], line_no=int(warning_parts[1]), message=warning_parts[2] - ) - ) - except Exception: # noqa pylint: disable=broad-except - # If an exception occurred while parsing the warning message, display the raw warning message. 
- sphinx_build_errors.append( - DocBuildError(file_path=None, line_no=None, message=sphinx_warning) - ) + @property + def _src_dir(self) -> str: + # TODO(mik-laj): + # After migrating the content from the core to providers, we should move all documentation from . + # to /airflow/ to keep the directory structure more maintainable. + if self.package_name == 'apache-airflow': + return DOCS_DIR + elif self.package_name.startswith('apache-airflow-providers'): + return f"{DOCS_DIR}/{self.package_name}" else: - sphinx_build_errors.append(DocBuildError(file_path=None, line_no=None, message=sphinx_warning)) - return sphinx_build_errors - - -def parse_spelling_warnings(warning_text: str) -> List[SpellingError]: - """ - Parses warnings from Sphinx. - - :param warning_text: warning to parse - :return: list of SpellingError. - """ - sphinx_spelling_errors = [] - for sphinx_warning in warning_text.split("\n"): - if not sphinx_warning: - continue - warning_parts = None - match = re.search(r"(.*):(\w*):\s\((\w*)\)\s?(\w*)\s?(.*)", sphinx_warning) - if match: - warning_parts = match.groups() - if warning_parts and len(warning_parts) == 5: - try: - sphinx_spelling_errors.append( - SpellingError( - file_path=warning_parts[0], - line_no=int(warning_parts[1]) if warning_parts[1] not in ('None', '') else None, - spelling=warning_parts[2], - suggestion=warning_parts[3] if warning_parts[3] else None, - context_line=warning_parts[4], - message=sphinx_warning, - ) - ) - except Exception: # noqa pylint: disable=broad-except - # If an exception occurred while parsing the warning message, display the raw warning message. 
- sphinx_spelling_errors.append( + raise Exception(F"Unsupported package: {self.package_name}") + + def clean_files(self) -> None: + """Cleanup all artifacts generated by previous builds.""" + api_dir = os.path.join(self._src_dir, "_api") + + shutil.rmtree(api_dir, ignore_errors=True) + shutil.rmtree(self._out_dir, ignore_errors=True) + os.makedirs(api_dir, exist_ok=True) + os.makedirs(self._out_dir, exist_ok=True) + + print(f"Recreated content of the {shlex.quote(self._out_dir)} and {shlex.quote(api_dir)} folders") + + def check_spelling(self): + """Checks spelling.""" + spelling_errors = [] + with TemporaryDirectory() as tmp_dir: + build_cmd = [ + "sphinx-build", + "-W", # turn warnings into errors + "-T", # show full traceback on exception + "-b", # builder to use + "spelling", + "-c", + DOCS_DIR, + "-d", # path for the cached environment and doctree files + self._doctree_dir, + self._src_dir, # path to documentation source files + tmp_dir, + ] + print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd])) + env = os.environ.copy() + env['AIRFLOW_PACKAGE_NAME'] = self.package_name + completed_proc = run( # pylint: disable=subprocess-run-check + build_cmd, cwd=self._src_dir, env=env + ) + if completed_proc.returncode != 0: + spelling_errors.append( SpellingError( file_path=None, line_no=None, spelling=None, suggestion=None, context_line=None, - message=sphinx_warning, + message=( + f"Sphinx spellcheck returned non-zero exit status: {completed_proc.returncode}." 
+ ), ) ) - else: - sphinx_spelling_errors.append( - SpellingError( - file_path=None, - line_no=None, - spelling=None, - suggestion=None, - context_line=None, - message=sphinx_warning, - ) + warning_text = "" + for filepath in glob(f"{tmp_dir}/**/*.spelling", recursive=True): + with open(filepath) as speeling_file: + warning_text += speeling_file.read() + + spelling_errors.extend(parse_spelling_warnings(warning_text, self._src_dir)) + return spelling_errors + + def build_sphinx_docs(self) -> List[DocBuildError]: + """Build Sphinx documentation""" + build_errors = [] + with NamedTemporaryFile() as tmp_file: + build_cmd = [ + "sphinx-build", + "-T", # show full traceback on exception + "--color", # do emit colored output + "-b", # builder to use + "html", + "-d", # path for the cached environment and doctree files + self._doctree_dir, + "-c", + DOCS_DIR, + "-w", # write warnings (and errors) to given file + tmp_file.name, + self._src_dir, # path to documentation source files + self._out_dir, # path to output directory + ] + print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd])) + env = os.environ.copy() + env['AIRFLOW_PACKAGE_NAME'] = self.package_name + completed_proc = run( # pylint: disable=subprocess-run-check + build_cmd, cwd=self._src_dir, env=env ) - return sphinx_spelling_errors - - -def check_spelling() -> None: - """ - Checks spelling for sphinx. 
- - :return: - """ - extensions_to_use = [ - 'provider_init_hack', - "sphinxarg.ext", - "autoapi.extension", - "sphinxcontrib.spelling", - "exampleinclude", - "sphinx.ext.autodoc", - "sphinx.ext.coverage", - "sphinx.ext.viewcode", - "sphinx.ext.graphviz", - "sphinxcontrib.httpdomain", - "sphinxcontrib.jinja", - "docroles", - "removemarktransform", - 'providers_packages_ref', - 'operators_and_hooks_ref', - ] - - with NamedTemporaryFile() as tmp_file: - build_cmd = [ - "sphinx-build", - "-W", # turn warnings into errors - "-T", # show full traceback on exception - "-b", # builder to use - "spelling", - "-d", # path for the cached environment and doctree files - "_build/doctrees", - "-D", # override the extensions because one of them throws an error on the spelling builder - f"extensions={','.join(extensions_to_use)}", - ".", # path to documentation source files - "_build/spelling", - ] - print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd])) - - completed_proc = run(build_cmd, cwd=DOCS_DIR) # pylint: disable=subprocess-run-check - if completed_proc.returncode != 0: - spelling_errors.append( - SpellingError( - file_path=None, - line_no=None, - spelling=None, - suggestion=None, - context_line=None, - message=f"Sphinx spellcheck returned non-zero exit status: {completed_proc.returncode}.", + if completed_proc.returncode != 0: + build_errors.append( + DocBuildError( + file_path=None, + line_no=None, + message=f"Sphinx returned non-zero exit status: {completed_proc.returncode}.", + ) ) - ) - - # pylint: disable=subprocess-run-check - run(f"find {DOCS_DIR} -name '*.spelling' -exec cat {{}} + >> {tmp_file.name}", shell=True) tmp_file.seek(0) warning_text = tmp_file.read().decode() - sphinx_build_errors = parse_spelling_warnings(warning_text) - spelling_errors.extend(sphinx_build_errors) - - -def build_sphinx_docs() -> None: - """Build documentation for sphinx.""" - with NamedTemporaryFile() as tmp_file: - build_cmd = [ - "sphinx-build", - "-T", # show full 
traceback on exception - "--color", # do emit colored output - "-b", # builder to use - "html", - "-d", # path for the cached environment and doctree files - "_build/doctrees", - "-w", # write warnings (and errors) to given file - tmp_file.name, - ".", # path to documentation source files - "_build/html", # path to output directory - ] - print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd])) - - completed_proc = run(build_cmd, cwd=DOCS_DIR) # pylint: disable=subprocess-run-check - if completed_proc.returncode != 0: - build_errors.append( - DocBuildError( - file_path=None, - line_no=None, - message=f"Sphinx returned non-zero exit status: {completed_proc.returncode}.", - ) - ) - tmp_file.seek(0) - warning_text = tmp_file.read().decode() - # Remove 7-bit C1 ANSI escape sequences - warning_text = re.sub(r"\x1B[@-_][0-?]*[ -/]*[@-~]", "", warning_text) - sphinx_build_errors = parse_sphinx_warnings(warning_text) - build_errors.extend(sphinx_build_errors) - - -def print_build_errors_and_exit(message) -> None: - """ - Prints build errors and exists. 
- :param message: - :return: - """ + # Remove 7-bit C1 ANSI escape sequences + warning_text = re.sub(r"\x1B[@-_][0-?]*[ -/]*[@-~]", "", warning_text) + build_errors.extend(parse_sphinx_warnings(warning_text, self._src_dir)) + return build_errors + + +def get_available_packages(): + """Get list of all available packages to build.""" + provider_package_names = [provider['package-name'] for provider in ALL_PROVIDER_YAMLS] + return ["apache-airflow", *provider_package_names] + + +def _get_parser(): + available_packages_list = " * " + "\n * ".join(get_available_packages()) + parser = argparse.ArgumentParser( + description='Builds documentation and runs spell checking', + epilog=f"List of supported packages:\n{available_packages_list}" "", + ) + parser.formatter_class = argparse.RawTextHelpFormatter + parser.add_argument( + '--disable-checks', dest='disable_checks', action='store_true', help='Disables extra checks' + ) + parser.add_argument( + "--package-filter", + help=( + "Filter specifying for which packages the documentation is to be built. Wildcard is supported." 
+ ), + ) + parser.add_argument('--docs-only', dest='docs_only', action='store_true', help='Only build documentation') + parser.add_argument( + '--spellcheck-only', dest='spellcheck_only', action='store_true', help='Only perform spellchecking' + ) + return parser + + +def build_docs_for_packages( + current_packages: List[str], docs_only: bool, spellcheck_only: bool +) -> Tuple[Dict[str, List[DocBuildError]], Dict[str, List[SpellingError]]]: + """Builds documentation for single package and returns errors""" + all_build_errors: Dict[str, List[DocBuildError]] = defaultdict(list) + all_spelling_errors: Dict[str, List[SpellingError]] = defaultdict(list) + for package_name in current_packages: + builder = AirflowDocsBuilder(package_name=package_name) + builder.clean_files() + if not docs_only: + spelling_errors = builder.check_spelling() + if spelling_errors: + all_spelling_errors[package_name].extend(spelling_errors) + + if not spellcheck_only: + docs_errors = builder.build_sphinx_docs() + if docs_errors: + all_build_errors[package_name].extend(docs_errors) + + return all_build_errors, all_spelling_errors + + +def display_packages_summary( + build_errors: Dict[str, List[DocBuildError]], spelling_errors: Dict[str, List[SpellingError]] +): + """Displays a summary that contains information on the number of errors in each packages""" + packages_names = {*build_errors.keys(), *spelling_errors.keys()} + tabular_data = [ + { + "Package name": package_name, + "Count of doc build errors": len(build_errors.get(package_name, [])), + "Count of spelling errors": len(spelling_errors.get(package_name, [])), + } + for package_name in sorted(packages_names, key=lambda k: k or '') + ] + print("#" * 20, "Packages errors summary", "#" * 20) + print(tabulate(tabular_data=tabular_data, headers="keys")) + print("#" * 50) + + +def print_build_errors_and_exit( + message: str, + build_errors: Dict[str, List[DocBuildError]], + spelling_errors: Dict[str, List[SpellingError]], +) -> None: + 
"""Prints build errors and exists.""" if build_errors or spelling_errors: if build_errors: - display_errors_summary() + display_errors_summary(build_errors) print() if spelling_errors: - display_spelling_error_summary() + display_spelling_error_summary(spelling_errors) print() print(message) + display_packages_summary(build_errors, spelling_errors) print() print(CHANNEL_INVITATION) sys.exit(1) -parser = argparse.ArgumentParser(description='Builds documentation and runs spell checking') -parser.add_argument('--docs-only', dest='docs_only', action='store_true', help='Only build documentation') -parser.add_argument( - '--spellcheck-only', dest='spellcheck_only', action='store_true', help='Only perform spellchecking' -) +def main(): + """Main code""" + args = _get_parser().parse_args() + available_packages = get_available_packages() + print("Available packages: ", available_packages) -args = parser.parse_args() + docs_only = args.docs_only + spellcheck_only = args.spellcheck_only + disable_checks = args.disable_checks + package_filter = args.package_filter -clean_files() + print("Current package filter: ", package_filter) + current_packages = ( + fnmatch.filter(available_packages, package_filter) if package_filter else available_packages + ) -CHANNEL_INVITATION = """\ -If you need help, write to #documentation channel on Airflow's Slack. -Channel link: https://apache-airflow.slack.com/archives/CJ1LVREHX -Invitation link: https://s.apache.org/airflow-slack\ -""" + print(f"Documentation will be built for {len(current_packages)} package(s): {current_packages}") -print_build_errors_and_exit("The documentation has errors. 
Fix them to build documentation.") + all_build_errors: Dict[Optional[str], List[DocBuildError]] = {} + all_spelling_errors: Dict[Optional[str], List[SpellingError]] = {} + package_build_errors, package_spelling_errors = build_docs_for_packages( + current_packages=current_packages, + docs_only=docs_only, + spellcheck_only=spellcheck_only, + ) + if package_build_errors: + all_build_errors.update(package_build_errors) + if package_spelling_errors: + all_spelling_errors.update(package_spelling_errors) + + if not disable_checks: + general_errors = [] + general_errors.extend(lint_checks.check_guide_links_in_operator_descriptions()) + general_errors.extend(lint_checks.check_enforce_code_block()) + general_errors.extend(lint_checks.check_exampleinclude_for_example_dags()) + if general_errors: + all_build_errors[None] = general_errors + + dev_index_generator.generate_index(f"{DOCS_DIR}/_build/index.html") + print_build_errors_and_exit( + "The documentation has errors.", + all_build_errors, + all_spelling_errors, + ) -if not args.docs_only: - check_spelling() - print_build_errors_and_exit("The documentation has spelling errors. Fix them to build documentation.") -if not args.spellcheck_only: - build_sphinx_docs() - check_guide_links_in_operator_descriptions() - check_enforce_code_block() - check_exampleinclude_for_example_dags() - check_google_guides() - print_build_errors_and_exit("The documentation has errors.") +main() diff --git a/docs/conf.py b/docs/conf.py index 3ed6ada91e494..5c8bc06e03263 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,11 +31,13 @@ # All configuration values have a default; values that are commented out # serve to show the default. 
"""Configuration of Airflow Docs""" +import glob import os import sys -from glob import glob from typing import List +import yaml + import airflow from airflow.configuration import default_config_yaml @@ -46,6 +48,36 @@ except ImportError: airflow_theme_is_available = False +sys.path.append(os.path.join(os.path.dirname(__file__), 'exts')) + +CONF_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) +ROOT_DIR = os.path.abspath(os.path.join(CONF_DIR, os.pardir)) + +# By default (e.g. on RTD), build docs for `airflow` package +PACKAGE_NAME = os.environ.get('AIRFLOW_PACKAGE_NAME', 'airflow') +if PACKAGE_NAME == 'apache-airflow': + os.environ['AIRFLOW_PACKAGE_NAME'] = 'airflow' + os.environ['AIRFLOW_PACKAGE_DIR'] = os.path.abspath(os.getcwd()) + os.environ['AIRFLOW_PACKAGE_VERSION'] = airflow.__version__ + PACKAGE_DIR = os.path.join(ROOT_DIR, 'airflow') + PACKAGE_VERSION = airflow.__version__ +else: + PACKAGE_NAME = os.environ['AIRFLOW_PACKAGE_NAME'] + from provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module + + ALL_PROVIDER_YAMLS = load_package_data() + try: + CURRENT_PROVIDER = next( + provider_yaml + for provider_yaml in ALL_PROVIDER_YAMLS + if provider_yaml['package-name'] == PACKAGE_NAME + ) + except StopIteration: + raise Exception(f"Could not find provider.yaml file for package: {PACKAGE_NAME}") + PACKAGE_DIR = CURRENT_PROVIDER['package-dir'] + PACKAGE_VERSION = 'master' + + # Hack to allow changing for piece of the code to behave differently while # the docs are being built. The main objective was to alter the # behavior of the utils.apply_default that was hiding function headers @@ -57,19 +89,11 @@ # See: https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information # General information about the project. -project = 'Airflow' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. 
-# -# The short X.Y version. -# version = '1.0.0' -version = airflow.__version__ - +project = PACKAGE_NAME +# # The version info for the project you're documenting +version = PACKAGE_VERSION # The full version, including alpha/beta/rc tags. -# release = '1.0.0' -release = airflow.__version__ +release = PACKAGE_VERSION # -- General configuration ----------------------------------------------------- # See: https://www.sphinx-doc.org/en/master/usage/configuration.html @@ -80,52 +104,57 @@ extensions = [ 'provider_init_hack', 'sphinx.ext.autodoc', - 'sphinx.ext.coverage', 'sphinx.ext.viewcode', - 'sphinx.ext.graphviz', 'sphinxarg.ext', - 'sphinxcontrib.httpdomain', - 'sphinxcontrib.jinja', 'sphinx.ext.intersphinx', 'autoapi.extension', 'exampleinclude', 'docroles', 'removemarktransform', 'sphinx_copybutton', - 'redirects', - 'providers_packages_ref', - 'operators_and_hooks_ref', - # First, generate redoc - 'sphinxcontrib.redoc', - # Second, update redoc script - "sphinx_script_update", + 'airflow_intersphinx', "sphinxcontrib.spelling", ] - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. - -sys.path.append(os.path.join(os.path.dirname(__file__), 'exts')) +if PACKAGE_NAME == 'apache-airflow': + extensions.extend( + [ + 'sphinxcontrib.jinja', + 'sphinx.ext.graphviz', + 'sphinxcontrib.httpdomain', + 'sphinxcontrib.httpdomain', + 'providers_packages_ref', + 'operators_and_hooks_ref', + # First, generate redoc + 'sphinxcontrib.redoc', + # Second, update redoc script + "sphinx_script_update", + ] + ) # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns: List[str] = [ - # We only link to selected subpackages. 
- '_api/airflow/index.rst', - # We have custom page - operators-and-hooks-ref.rst - '_api/airflow/providers/index.rst', - # Packages with subpackages - "_api/airflow/providers/microsoft/index.rst", - "_api/airflow/providers/apache/index.rst", - "_api/airflow/providers/cncf/index.rst", - # Templates or partials - 'autoapi_templates', - 'howto/operator/google/_partials', - 'howto/operator/microsoft/_partials', -] - -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) +exclude_patterns: List[str] +if PACKAGE_NAME == 'apache-airflow': + exclude_patterns = [ + # We only link to selected subpackages. + '_api/airflow/index.rst', + # We have custom page - operators-and-hooks-ref.rst + '_api/airflow/providers/index.rst', + # Packages with subpackages + "_api/airflow/providers/microsoft/index.rst", + "_api/airflow/providers/apache/index.rst", + "_api/airflow/providers/cncf/index.rst", + # Templates or partials + 'autoapi_templates', + 'howto/operator/google/_partials', + 'howto/operator/microsoft/_partials', + 'apache-airflow-providers-*/', + 'README.rst', + ] + glob.glob('apache-airflow-providers-*') +else: + exclude_patterns = [ + '/_partials/', + ] def _get_rst_filepath_from_path(filepath: str): @@ -141,30 +170,26 @@ def _get_rst_filepath_from_path(filepath: str): return result -# Exclude top-level packages -# do not exclude these top-level modules from the doc build: -_allowed_top_level = ("exceptions.py",) +if PACKAGE_NAME == 'apache-airflow': + # Exclude top-level packages + # do not exclude these top-level modules from the doc build: + _allowed_top_level = ("exceptions.py",) -for path in glob(f"{ROOT_DIR}/airflow/*"): - name = os.path.basename(path) - if os.path.isfile(path) and not path.endswith(_allowed_top_level): - exclude_patterns.append(f"_api/airflow/{name.rpartition('.')[0]}") - browsable_packages = ["operators", "hooks", "sensors", "providers", "executors", "models", "secrets"] - if os.path.isdir(path) and name not in 
browsable_packages: - exclude_patterns.append(f"_api/airflow/{name}") + for path in glob.glob(f"{ROOT_DIR}/airflow/*"): + name = os.path.basename(path) + if os.path.isfile(path) and not path.endswith(_allowed_top_level): + exclude_patterns.append(f"_api/airflow/{name.rpartition('.')[0]}") + browsable_packages = ["operators", "hooks", "sensors", "providers", "executors", "models", "secrets"] + if os.path.isdir(path) and name not in browsable_packages: + exclude_patterns.append(f"_api/airflow/{name}") +else: + exclude_patterns.extend( + _get_rst_filepath_from_path(f) for f in glob.glob(f"{PACKAGE_DIR}/**/example_dags/**/*.py") + ) # Add any paths that contain templates here, relative to this directory. templates_path = ['templates'] -# The suffix of source filenames. -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - # If true, keep warnings as "system message" paragraphs in the built documents. keep_warnings = True @@ -180,8 +205,10 @@ def _get_rst_filepath_from_path(filepath: str): # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -html_title = "Airflow Documentation" - +if PACKAGE_NAME == 'apache-airflow': + html_title = "Airflow Documentation" +else: + html_title = f"{PACKAGE_NAME} Documentation" # A shorter title for the navigation bar. Default is the same as html_title. html_short_title = "" @@ -194,13 +221,18 @@ def _get_rst_filepath_from_path(filepath: str): # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['static'] - +if PACKAGE_NAME == 'apache-airflow': + html_static_path = ['static'] +else: + html_static_path = [] # A list of JavaScript filename. 
The entry must be a filename string or a # tuple containing the filename string and the attributes dictionary. The # filename must be relative to the html_static_path, or a full URI with # scheme like http://example.org/script.js. -html_js_files = ['jira-links.js'] +if PACKAGE_NAME == 'apache-airflow': + html_js_files = ['jira-links.js'] +else: + html_js_files = [] # Custom sidebar templates, maps document names to template names. if airflow_theme_is_available: @@ -255,7 +287,23 @@ def _get_rst_filepath_from_path(filepath: str): # See: /~https://github.com/tardyp/sphinx-jinja # Jinja context -jinja_contexts = {'config_ctx': {"configs": default_config_yaml()}} +if PACKAGE_NAME == 'apache-airflow': + jinja_contexts = {'config_ctx': {"configs": default_config_yaml()}} +else: + + def _load_config(): + templates_dir = os.path.join(PACKAGE_DIR, 'config_templates') + file_path = os.path.join(templates_dir, "config.yml") + if not os.path.exists(file_path): + return {} + + with open(file_path) as config_file: + return yaml.safe_load(config_file) + + config = _load_config() + if config: + jinja_contexts = {'config_ctx': {"configs": config}} + extensions.append('sphinxcontrib.jinja') # -- Options for sphinx.ext.autodoc -------------------------------------------- # See: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html @@ -335,32 +383,42 @@ def _get_rst_filepath_from_path(filepath: str): 'python': ('https://docs.python.org/3/', None), 'requests': ('https://requests.readthedocs.io/en/master/', None), 'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest/', None), - # google-api - 'google-api-core': ('https://googleapis.dev/python/google-api-core/latest', None), - 'google-cloud-automl': ('https://googleapis.dev/python/automl/latest', None), - 'google-cloud-bigquery': ('https://googleapis.dev/python/bigquery/latest', None), - 'google-cloud-bigquery-datatransfer': ('https://googleapis.dev/python/bigquerydatatransfer/latest', None), - 
'google-cloud-bigquery-storage': ('https://googleapis.dev/python/bigquerystorage/latest', None), - 'google-cloud-bigtable': ('https://googleapis.dev/python/bigtable/latest', None), - 'google-cloud-container': ('https://googleapis.dev/python/container/latest', None), - 'google-cloud-core': ('https://googleapis.dev/python/google-cloud-core/latest', None), - 'google-cloud-datacatalog': ('https://googleapis.dev/python/datacatalog/latest', None), - 'google-cloud-datastore': ('https://googleapis.dev/python/datastore/latest', None), - 'google-cloud-dlp': ('https://googleapis.dev/python/dlp/latest', None), - 'google-cloud-kms': ('https://googleapis.dev/python/cloudkms/latest', None), - 'google-cloud-language': ('https://googleapis.dev/python/language/latest', None), - 'google-cloud-monitoring': ('https://googleapis.dev/python/monitoring/latest', None), - 'google-cloud-pubsub': ('https://googleapis.dev/python/pubsub/latest', None), - 'google-cloud-redis': ('https://googleapis.dev/python/redis/latest', None), - 'google-cloud-spanner': ('https://googleapis.dev/python/spanner/latest', None), - 'google-cloud-speech': ('https://googleapis.dev/python/speech/latest', None), - 'google-cloud-storage': ('https://googleapis.dev/python/storage/latest', None), - 'google-cloud-tasks': ('https://googleapis.dev/python/cloudtasks/latest', None), - 'google-cloud-texttospeech': ('https://googleapis.dev/python/texttospeech/latest', None), - 'google-cloud-translate': ('https://googleapis.dev/python/translation/latest', None), - 'google-cloud-videointelligence': ('https://googleapis.dev/python/videointelligence/latest', None), - 'google-cloud-vision': ('https://googleapis.dev/python/vision/latest', None), } +if PACKAGE_NAME in ('apache-airflow-providers-google', 'apache-airflow'): + intersphinx_mapping.update( + { + 'google-api-core': ('https://googleapis.dev/python/google-api-core/latest', None), + 'google-cloud-automl': ('https://googleapis.dev/python/automl/latest', None), + 
'google-cloud-bigquery': ('https://googleapis.dev/python/bigquery/latest', None), + 'google-cloud-bigquery-datatransfer': ( + 'https://googleapis.dev/python/bigquerydatatransfer/latest', + None, + ), + 'google-cloud-bigquery-storage': ('https://googleapis.dev/python/bigquerystorage/latest', None), + 'google-cloud-bigtable': ('https://googleapis.dev/python/bigtable/latest', None), + 'google-cloud-container': ('https://googleapis.dev/python/container/latest', None), + 'google-cloud-core': ('https://googleapis.dev/python/google-cloud-core/latest', None), + 'google-cloud-datacatalog': ('https://googleapis.dev/python/datacatalog/latest', None), + 'google-cloud-datastore': ('https://googleapis.dev/python/datastore/latest', None), + 'google-cloud-dlp': ('https://googleapis.dev/python/dlp/latest', None), + 'google-cloud-kms': ('https://googleapis.dev/python/cloudkms/latest', None), + 'google-cloud-language': ('https://googleapis.dev/python/language/latest', None), + 'google-cloud-monitoring': ('https://googleapis.dev/python/monitoring/latest', None), + 'google-cloud-pubsub': ('https://googleapis.dev/python/pubsub/latest', None), + 'google-cloud-redis': ('https://googleapis.dev/python/redis/latest', None), + 'google-cloud-spanner': ('https://googleapis.dev/python/spanner/latest', None), + 'google-cloud-speech': ('https://googleapis.dev/python/speech/latest', None), + 'google-cloud-storage': ('https://googleapis.dev/python/storage/latest', None), + 'google-cloud-tasks': ('https://googleapis.dev/python/cloudtasks/latest', None), + 'google-cloud-texttospeech': ('https://googleapis.dev/python/texttospeech/latest', None), + 'google-cloud-translate': ('https://googleapis.dev/python/translation/latest', None), + 'google-cloud-videointelligence': ( + 'https://googleapis.dev/python/videointelligence/latest', + None, + ), + 'google-cloud-vision': ('https://googleapis.dev/python/vision/latest', None), + } + ) # -- Options for sphinx.ext.viewcode 
------------------------------------------- # See: https://www.sphinx-doc.org/es/master/usage/extensions/viewcode.html @@ -375,28 +433,39 @@ def _get_rst_filepath_from_path(filepath: str): # Paths (relative or absolute) to the source code that you wish to generate # your API documentation from. autoapi_dirs = [ - os.path.abspath('../airflow'), + PACKAGE_DIR, ] # A directory that has user-defined templates to override our default templates. -autoapi_template_dir = 'autoapi_templates' +if PACKAGE_NAME == 'apache-airflow': + autoapi_template_dir = 'autoapi_templates' # A list of patterns to ignore when finding files autoapi_ignore = [ - '*/airflow/kubernetes/kubernetes_request_factory/*', + 'airflow/configuration/', + '*/example_dags/*', '*/_internal*', - '*/airflow/**/providers/**/utils/*', '*/node_modules/*', - '*/example_dags/*', '*/migrations/*', + '*/contrib/*', ] +if PACKAGE_NAME == 'apache-airflow': + autoapi_ignore.append('*/airflow/providers/*') # Keep the AutoAPI generated files on the filesystem after the run. # Useful for debugging. autoapi_keep_files = True # Relative path to output the AutoAPI files into. This can also be used to place the generated documentation # anywhere in your documentation hierarchy. -autoapi_root = '_api' +if PACKAGE_NAME == 'apache-airflow': + autoapi_root = '_api' +else: + autoapi_root = f'{PACKAGE_NAME}/_api' + +# Whether to insert the generated documentation into the TOC tree. If this is False, the default AutoAPI +# index page is not generated and you will need to include the generated documentation in a +# TOC tree entry yourself. 
+autoapi_add_toctree_entry = bool(PACKAGE_NAME == 'apache-airflow') # -- Options for ext.exampleinclude -------------------------------------------- exampleinclude_sourceroot = os.path.abspath('..') @@ -404,20 +473,26 @@ def _get_rst_filepath_from_path(filepath: str): # -- Options for ext.redirects ------------------------------------------------- redirects_file = 'redirects.txt' +# -- Options for sphinxcontrib-spelling ---------------------------------------- +spelling_word_list_filename = [os.path.join(CONF_DIR, 'spelling_wordlist.txt')] + # -- Options for sphinxcontrib.redoc ------------------------------------------- # See: https://sphinxcontrib-redoc.readthedocs.io/en/stable/ -OPENAPI_FILE = os.path.join(os.path.dirname(__file__), "..", "airflow", "api_connexion", "openapi", "v1.yaml") -redoc = [ - { - 'name': 'Airflow REST API', - 'page': 'stable-rest-api-ref', - 'spec': OPENAPI_FILE, - 'opts': { - 'hide-hostname': True, - 'no-auto-auth': True, +if PACKAGE_NAME == 'apache-airflow': + OPENAPI_FILE = os.path.join( + os.path.dirname(__file__), "..", "airflow", "api_connexion", "openapi", "v1.yaml" + ) + redoc = [ + { + 'name': 'Airflow REST API', + 'page': 'stable-rest-api-ref', + 'spec': OPENAPI_FILE, + 'opts': { + 'hide-hostname': True, + 'no-auto-auth': True, + }, }, - }, -] + ] -# Options for script updater -redoc_script_url = "https://cdn.jsdelivr.net/npm/redoc@2.0.0-rc.30/bundles/redoc.standalone.js" + # Options for script updater + redoc_script_url = "https://cdn.jsdelivr.net/npm/redoc@2.0.0-rc.30/bundles/redoc.standalone.js" diff --git a/docs/exts/airflow_intersphinx.py b/docs/exts/airflow_intersphinx.py new file mode 100644 index 0000000000000..0fb25230fdda7 --- /dev/null +++ b/docs/exts/airflow_intersphinx.py @@ -0,0 +1,181 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os +import time +from typing import Any, Dict + +from provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module +from sphinx.application import Sphinx + +CURRENT_DIR = os.path.dirname(__file__) +ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir)) +DOCS_DIR = os.path.join(ROOT_DIR, 'docs') +DOCS_PROVIDER_DIR = os.path.join(ROOT_DIR, 'docs') +S3_DOC_URL = "http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com" + +# Customize build for readthedocs.io +# See: +# https://docs.readthedocs.io/en/stable/faq.html#how-do-i-change-behavior-when-building-with-read-the-docs +IS_RTD = os.environ.get('READTHEDOCS') == 'True' + + +def _create_init_py(app, config): + del app + # del config + intersphinx_mapping = getattr(config, 'intersphinx_mapping', None) or {} + + providers_mapping = _generate_provider_intersphinx_mapping() + intersphinx_mapping.update(providers_mapping) + + config.intersphinx_mapping = intersphinx_mapping + + +def _generate_provider_intersphinx_mapping(): + airflow_mapping = {} + for provider in load_package_data(): + package_name = provider['package-name'] + if os.environ.get('AIRFLOW_PACKAGE_NAME') == package_name: + continue + + # For local build and S3, use relative URLS. 
+ # For RTD, use absolute URLs + if IS_RTD: + provider_base_url = f"{S3_DOC_URL}/docs/{package_name}/latest/" + else: + provider_base_url = f'/docs/{package_name}/latest/' + + airflow_mapping[package_name] = ( + # base URI + provider_base_url, + # Index locations list + # If passed None, this will try to fetch the index from `[base_url]/objects.inv` + # If we pass a path containing `://` then we will try to index from the given address. + # Otherwise, it will try to read the local file + # + # In this case, the local index will be read. If unsuccessful, the remote index + # will be fetched. + ( + f'{DOCS_DIR}/_build/docs/{package_name}/latest/objects.inv', + f'{S3_DOC_URL}/docs/{package_name}/latest/objects.inv', + ), + ) + if os.environ.get('AIRFLOW_PACKAGE_NAME') != 'apache-airflow': + airflow_mapping['apache-airflow'] = ( + # base URI + '/docs/apache-airflow/latest/', + # Index locations list + # If passed None, this will try to fetch the index from `[base_url]/objects.inv` + # If we pass a path containing `://` then we will try to index from the given address. + # Otherwise, it will try to read the local file + # + # In this case, the local index will be read. If unsuccessful, the remote index + # will be fetched. 
+ ( + f'{DOCS_DIR}/_build/docs/apache-airflow/latest/objects.inv', + 'https://airflow.readthedocs.io/en/latest/objects.inv', + ), + ) + + return airflow_mapping + + +def setup(app: Sphinx): + """Sets the plugin up""" + app.connect("config-inited", _create_init_py) + + return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True} + + +if __name__ == "__main__": + + def main(): + """A simple application that displays the roles available for Airflow documentation.""" + import concurrent.futures + import sys + + from sphinx.ext.intersphinx import fetch_inventory_group + + class _MockConfig: + intersphinx_timeout = None + intersphinx_cache_limit = 1 + tls_verify = False + user_agent = None + + class _MockApp: + srcdir = '' + config = _MockConfig() + + def warn(self, msg: str) -> None: + """Display warning""" + print(msg, file=sys.stderr) + + def fetch_inventories(intersphinx_mapping) -> Dict[str, Any]: + now = int(time.time()) + + cache: Dict[Any, Any] = {} + with concurrent.futures.ThreadPoolExecutor() as pool: + for name, (uri, invs) in intersphinx_mapping.values(): + pool.submit(fetch_inventory_group, name, uri, invs, cache, _MockApp(), now) + + inv_dict = {} + for uri, (name, now, invdata) in cache.items(): + del uri + del now + inv_dict[name] = invdata + return inv_dict + + def domain_and_object_type_to_role(domain: str, object_type: str) -> str: + if domain == 'py': + from sphinx.domains.python import PythonDomain + + role_name = PythonDomain.object_types[object_type].roles[0] + elif domain == 'std': + from sphinx.domains.std import StandardDomain + + role_name = StandardDomain.object_types[object_type].roles[0] + else: + role_name = object_type + return role_name + + def inspect_main(inv_data, name) -> None: + try: + for key in sorted(inv_data or {}): + for entry, _ in sorted(inv_data[key].items()): + domain, object_type = key.split(":") + role_name = domain_and_object_type_to_role(domain, object_type) + + 
print(f":{role_name}:`{name}:{entry}`") + except ValueError as exc: + print(exc.args[0] % exc.args[1:]) + except Exception as exc: # pylint: disable=broad-except + print('Unknown error: %r' % exc) + + provider_mapping = _generate_provider_intersphinx_mapping() + + for key, value in provider_mapping.copy().items(): + provider_mapping[key] = (key, value) + + inv_dict = fetch_inventories(provider_mapping) + + for name, inv_data in inv_dict.items(): + inspect_main(inv_data, name) + + import logging + + logging.basicConfig(level=logging.DEBUG) + main() diff --git a/docs/exts/docs_build/__init__.py b/docs/exts/docs_build/__init__.py new file mode 100644 index 0000000000000..dd26d4ff9adea --- /dev/null +++ b/docs/exts/docs_build/__init__.py @@ -0,0 +1,19 @@ +# flake8: noqa +# Disable Flake8 because of all the sphinx imports +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/docs/exts/docs_build/code_utils.py b/docs/exts/docs_build/code_utils.py new file mode 100644 index 0000000000000..e35c8a4443f85 --- /dev/null +++ b/docs/exts/docs_build/code_utils.py @@ -0,0 +1,62 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from contextlib import suppress + + +def prepare_code_snippet(file_path: str, line_no: int, context_lines_count: int = 5) -> str: + """ + Prepares code snippet. + :param file_path: file path + :param line_no: line number + :param context_lines_count: number of lines of context. + :return: + """ + + def guess_lexer_for_filename(filename): + from pygments.lexers import get_lexer_for_filename + from pygments.util import ClassNotFound + + try: + lexer = get_lexer_for_filename(filename) + except ClassNotFound: + from pygments.lexers.special import TextLexer + + lexer = TextLexer() + return lexer + + with open(file_path) as text_file: + # Highlight code + code = text_file.read() + with suppress(ImportError): + import pygments + from pygments.formatters.terminal import TerminalFormatter + + code = pygments.highlight( + code=code, formatter=TerminalFormatter(), lexer=guess_lexer_for_filename(file_path) + ) + + code_lines = code.split("\n") + # Prepend line number + code_lines = [f"{line_no:4} | {line}" for line_no, line in enumerate(code_lines, 1)] + # # Cut out the snippet + start_line_no = max(0, line_no - context_lines_count) + end_line_no = line_no + context_lines_count + code_lines = code_lines[start_line_no:end_line_no] + # Join lines + code = "\n".join(code_lines) + return code diff 
--git a/docs/exts/docs_build/dev_index_generator.py b/docs/exts/docs_build/dev_index_generator.py new file mode 100644 index 0000000000000..800f4de48a927 --- /dev/null +++ b/docs/exts/docs_build/dev_index_generator.py @@ -0,0 +1,80 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import argparse +import os +import sys +from glob import glob + +import jinja2 + +from docs.exts.provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module + +CURRENT_DIR = os.path.abspath(os.path.dirname(__file__)) +DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir)) +BUILD_DIR = os.path.abspath(os.path.join(DOCS_DIR, '_build')) +ALL_PROVIDER_YAMLS = load_package_data() + + +def _get_jinja_env(): + loader = jinja2.FileSystemLoader(CURRENT_DIR, followlinks=True) + env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined) + return env + + +def _render_template(template_name, **kwargs): + return _get_jinja_env().get_template(template_name).render(**kwargs) + + +def _render_content(): + provider_packages = [ + os.path.basename(os.path.dirname(p)) for p in glob(f"{BUILD_DIR}/docs/apache-airflow-providers-*/") + ] + providers = [] + for package_name in provider_packages: + try: + current_provider = next( + provider_yaml + for provider_yaml in ALL_PROVIDER_YAMLS + if provider_yaml['package-name'] == package_name + ) + providers.append(current_provider) + except StopIteration: + raise Exception(f"Could not find provider.yaml file for package: {package_name}") + + content = _render_template( + 'dev_index_template.html.jinja2', providers=sorted(providers, key=lambda k: k['package-name']) + ) + return content + + +def generate_index(out_file: str) -> None: + """ + Generates an index for development documentation. 
+ + :param out_file: The path where the index should be stored + """ + content = _render_content() + with open(out_file, "w") as output_file: + output_file.write(content) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout) + args = parser.parse_args() + args.outfile.write(_render_content()) diff --git a/docs/exts/docs_build/dev_index_template.html.jinja2 b/docs/exts/docs_build/dev_index_template.html.jinja2 new file mode 100644 index 0000000000000..e8c7f0d1f87c6 --- /dev/null +++ b/docs/exts/docs_build/dev_index_template.html.jinja2 @@ -0,0 +1,54 @@ + + + + + + + + + + + Apache Airflow documentation + + + +
+
+ This is the documentation for the Apache Airflow developer version. + For documentation for stable versions, see: airflow.apache.org. +
+ +

Apache Airflow Documentation

+
+
+

apache-airflow

+

+ Apache Airflow Core, which includes webserver, scheduler, CLI and other components that are needed for minimal Airflow installation. +

+
+
+ +
+
+
+
+

Providers packages

+

+ Providers packages include integrations with third party integrations. They are updated independently of the Apache Airflow core. +

+ +
+
+
+ + diff --git a/docs/exts/docs_build/errors.py b/docs/exts/docs_build/errors.py new file mode 100644 index 0000000000000..8a248b3a0eb42 --- /dev/null +++ b/docs/exts/docs_build/errors.py @@ -0,0 +1,104 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import os +from functools import total_ordering +from typing import Dict, List, NamedTuple, Optional + +from airflow.utils.code_utils import prepare_code_snippet + +CURRENT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) +DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir)) + + +@total_ordering +class DocBuildError(NamedTuple): + """Errors found in docs build.""" + + file_path: Optional[str] + line_no: Optional[int] + message: str + + def __eq__(self, other): + left = (self.file_path, self.line_no, self.message) + right = (other.file_path, other.line_no, other.message) + return left == right + + def __ne__(self, other): + return not self == other + + def __lt__(self, right): + file_path_a = self.file_path or '' + file_path_b = right.file_path or '' + line_no_a = self.line_no or 0 + line_no_b = right.line_no or 0 + left = (file_path_a, line_no_a, self.message) + right = (file_path_b, line_no_b, right.message) + return left < right + + +def 
display_errors_summary(build_errors: Dict[str, List[DocBuildError]]) -> None: + """Displays summary of errors""" + print("#" * 20, "Docs build errors summary", "#" * 20) + + for package_name, errors in build_errors.items(): + if package_name: + print("=" * 20, package_name, "=" * 20) + else: + print("=" * 20, "General", "=" * 20) + for warning_no, error in enumerate(sorted(errors), 1): + print("-" * 20, f"Error {warning_no:3}", "-" * 20) + print(error.message) + print() + if error.file_path and error.line_no: + print(f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)} ({error.line_no})") + print() + print(prepare_code_snippet(error.file_path, error.line_no)) + elif error.file_path: + print(f"File path: {error.file_path}") + + print("#" * 50) + + +def parse_sphinx_warnings(warning_text: str, docs_dir: str) -> List[DocBuildError]: + """ + Parses warnings from Sphinx. + + :param warning_text: warning to parse + :return: list of DocBuildErrors. + """ + sphinx_build_errors = [] + for sphinx_warning in warning_text.split("\n"): + if not sphinx_warning: + continue + warning_parts = sphinx_warning.split(":", 2) + if len(warning_parts) == 3: + try: + sphinx_build_errors.append( + DocBuildError( + file_path=os.path.join(docs_dir, warning_parts[0]), + line_no=int(warning_parts[1]), + message=warning_parts[2], + ) + ) + except Exception: # noqa pylint: disable=broad-except + # If an exception occurred while parsing the warning message, display the raw warning message. 
+ sphinx_build_errors.append( + DocBuildError(file_path=None, line_no=None, message=sphinx_warning) + ) + else: + sphinx_build_errors.append(DocBuildError(file_path=None, line_no=None, message=sphinx_warning)) + return sphinx_build_errors diff --git a/docs/exts/docs_build/lint_checks.py b/docs/exts/docs_build/lint_checks.py new file mode 100644 index 0000000000000..1049cc02fd9e6 --- /dev/null +++ b/docs/exts/docs_build/lint_checks.py @@ -0,0 +1,228 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import ast +import os +import re +from glob import glob +from itertools import chain +from typing import Iterable, List, Optional, Set + +from docs.exts.docs_build.errors import DocBuildError # pylint: disable=no-name-in-module + +ROOT_PROJECT_DIR = os.path.abspath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir) +) +ROOT_PACKAGE_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow") +DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs") + + +def find_existing_guide_operator_names(src_dir: str) -> Set[str]: + """ + Find names of existing operators. + :return names of existing operators. 
+ """ + operator_names = set() + + paths = glob(f"{src_dir}/**/*.rst", recursive=True) + for path in paths: + with open(path) as f: + operator_names |= set(re.findall(".. _howto/operator:(.+?):", f.read())) + + return operator_names + + +def extract_ast_class_def_by_name(ast_tree, class_name): + """ + Extracts class definition by name + :param ast_tree: AST tree + :param class_name: name of the class. + :return: class node found + """ + + class ClassVisitor(ast.NodeVisitor): + """Visitor.""" + + def __init__(self): + self.found_class_node = None + + def visit_ClassDef(self, node): # pylint: disable=invalid-name + """ + Visit class definition. + :param node: node. + :return: + """ + if node.name == class_name: + self.found_class_node = node + + visitor = ClassVisitor() + visitor.visit(ast_tree) + + return visitor.found_class_node + + +def check_guide_links_in_operator_descriptions() -> List[DocBuildError]: + """Check if there are links to guides in operator's descriptions.""" + # TODO: We should also check the guides in the provider documentations. + # For now, we are only checking the core documentation. + # This is easiest to do after the content has been fully migrated. + build_errors = [] + + def generate_build_error(path, line_no, operator_name): + return DocBuildError( + package_name=None, + file_path=path, + line_no=line_no, + message=( + f"Link to the guide is missing in operator's description: {operator_name}.\n" + f"Please add link to the guide to the description in the following form:\n" + f"\n" + f".. 
seealso::\n" + f" For more information on how to use this operator, take a look at the guide:\n" + f" :ref:`apache-airflow:howto/operator:{operator_name}`\n" + ), + ) + + # Extract operators for which there are existing .rst guides + operator_names = find_existing_guide_operator_names(f"{DOCS_DIR}/howto/operator") + + # Extract all potential python modules that can contain operators + python_module_paths = chain( + glob(f"{ROOT_PACKAGE_DIR}/operators/*.py"), + glob(f"{ROOT_PACKAGE_DIR}/sensors/*.py"), + glob(f"{ROOT_PACKAGE_DIR}/providers/**/operators/*.py", recursive=True), + glob(f"{ROOT_PACKAGE_DIR}/providers/**/sensors/*.py", recursive=True), + glob(f"{ROOT_PACKAGE_DIR}/providers/**/transfers/*.py", recursive=True), + ) + + for py_module_path in python_module_paths: + with open(py_module_path) as f: + py_content = f.read() + + if "This module is deprecated" in py_content: + continue + + for existing_operator in operator_names: + if f"class {existing_operator}" not in py_content: + continue + # This is a potential file with necessary class definition. + # To make sure it's a real Python class definition, we build AST tree + ast_tree = ast.parse(py_content) + class_def = extract_ast_class_def_by_name(ast_tree, existing_operator) + + if class_def is None: + continue + + docstring = ast.get_docstring(class_def) + if docstring and "This class is deprecated." in docstring: + continue + + if f":ref:`apache-airflow:howto/operator:{existing_operator}`" in ( + docstring or "" + ) or f":ref:`howto/operator:{existing_operator}`" in (docstring or ""): + continue + + build_errors.append(generate_build_error(py_module_path, class_def.lineno, existing_operator)) + return build_errors + + +def assert_file_not_contains(file_path: str, pattern: str, message: str) -> Optional[DocBuildError]: + """ + Asserts that file does not contain the pattern. Return message error if it does. 
+ :param file_path: file + :param pattern: pattern + :param message: message to return + """ + with open(file_path, "rb", 0) as doc_file: + pattern_compiled = re.compile(pattern) + + for num, line in enumerate(doc_file, 1): + line_decode = line.decode() + if re.search(pattern_compiled, line_decode): + return DocBuildError(file_path=file_path, line_no=num, message=message) + return None + + +def filter_file_list_by_pattern(file_paths: Iterable[str], pattern: str) -> List[str]: + """ + Filters file list to those whose content matches the pattern + :param file_paths: file paths to check + :param pattern: pattern to match + :return: list of files matching the pattern + """ + output_paths = [] + pattern_compiled = re.compile(pattern) + for file_path in file_paths: + with open(file_path, "rb", 0) as text_file: + text_file_content = text_file.read().decode() + if re.findall(pattern_compiled, text_file_content): + output_paths.append(file_path) + return output_paths + + +def find_modules(deprecated_only: bool = False) -> Set[str]: + """ + Finds all modules. + :param deprecated_only: whether only deprecated modules should be found. 
+ :return: set of all modules found + """ + file_paths = glob(f"{ROOT_PACKAGE_DIR}/**/*.py", recursive=True) + # Exclude __init__.py + file_paths = [f for f in file_paths if not f.endswith("__init__.py")] + if deprecated_only: + file_paths = filter_file_list_by_pattern(file_paths, r"This module is deprecated.") + # Make path relative + file_paths = [os.path.relpath(f, ROOT_PROJECT_DIR) for f in file_paths] + # Convert filename to module + modules_names = {file_path.rpartition(".")[0].replace("/", ".") for file_path in file_paths} + return modules_names + + +def check_exampleinclude_for_example_dags() -> List[DocBuildError]: + """Checks all exampleincludes for example dags.""" + all_docs_files = glob(f"{DOCS_DIR}/**/*rst", recursive=True) + build_errors = [] + for doc_file in all_docs_files: + build_error = assert_file_not_contains( + file_path=doc_file, + pattern=r"literalinclude::.+example_dags", + message=( + "literalinclude directive is prohibited for example DAGs. \n" + "You should use the exampleinclude directive to include example DAGs." + ), + ) + if build_error: + build_errors.append(build_error) + return build_errors + + +def check_enforce_code_block() -> List[DocBuildError]: + """Checks all code:: blocks.""" + all_docs_files = glob(f"{DOCS_DIR}/**/*rst", recursive=True) + build_errors = [] + for doc_file in all_docs_files: + build_error = assert_file_not_contains( + file_path=doc_file, + pattern=r"^.. code::", + message=( + "We recommend using the code-block directive instead of the code directive. " + "The code-block directive is more feature-full." 
+ ), + ) + if build_error: + build_errors.append(build_error) + return build_errors diff --git a/docs/exts/docs_build/spelling_checks.py b/docs/exts/docs_build/spelling_checks.py new file mode 100644 index 0000000000000..c2b7ca93bad15 --- /dev/null +++ b/docs/exts/docs_build/spelling_checks.py @@ -0,0 +1,172 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import os +import re +from functools import total_ordering +from typing import Dict, List, NamedTuple, Optional + +from airflow.utils.code_utils import prepare_code_snippet + +CURRENT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) +DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir)) + + +@total_ordering +class SpellingError(NamedTuple): + """Spelling errors found when building docs.""" + + file_path: Optional[str] + line_no: Optional[int] + spelling: Optional[str] + suggestion: Optional[str] + context_line: Optional[str] + message: str + + def __eq__(self, other): + left = ( + self.file_path, + self.line_no, + self.spelling, + self.context_line, + self.message, + ) + right = ( + other.file_path, + other.line_no, + other.spelling, + other.context_line, + other.message, + ) + return left == right + + def __ne__(self, other): + return not self == other + + def __lt__(self, other): + file_path_a = self.file_path or '' + file_path_b = other.file_path or '' + line_no_a = self.line_no or 0 + line_no_b = other.line_no or 0 + context_line_a = self.context_line or '' + context_line_b = other.context_line or '' + left = (file_path_a, line_no_a, context_line_a, self.spelling, self.message) + right = ( + file_path_b, + line_no_b, + context_line_b, + other.spelling, + other.message, + ) + return left < right + + +def parse_spelling_warnings(warning_text: str, docs_dir) -> List[SpellingError]: + """ + Parses warnings from Sphinx. + + :param warning_text: warning to parse + :return: list of SpellingError. 
+ """ + sphinx_spelling_errors = [] + for sphinx_warning in warning_text.split("\n"): + if not sphinx_warning: + continue + warning_parts = None + match = re.search(r"(.*):(\w*):\s\((\w*)\)\s?(\w*)\s?(.*)", sphinx_warning) + if match: + warning_parts = match.groups() + if warning_parts and len(warning_parts) == 5: + try: + sphinx_spelling_errors.append( + SpellingError( + file_path=os.path.join(docs_dir, warning_parts[0]), + line_no=int(warning_parts[1]) if warning_parts[1] not in ('None', '') else None, + spelling=warning_parts[2], + suggestion=warning_parts[3] if warning_parts[3] else None, + context_line=warning_parts[4], + message=sphinx_warning, + ) + ) + except Exception: # noqa pylint: disable=broad-except + # If an exception occurred while parsing the warning message, display the raw warning message. + sphinx_spelling_errors.append( + SpellingError( + file_path=None, + line_no=None, + spelling=None, + suggestion=None, + context_line=None, + message=sphinx_warning, + ) + ) + else: + sphinx_spelling_errors.append( + SpellingError( + file_path=None, + line_no=None, + spelling=None, + suggestion=None, + context_line=None, + message=sphinx_warning, + ) + ) + return sphinx_spelling_errors + + +def display_spelling_error_summary(spelling_errors: Dict[str, List[SpellingError]]) -> None: + """Displays summary of Spelling errors""" + print("#" * 20, "Spelling errors summary", "#" * 20) + + for package_name, errors in sorted(spelling_errors.items()): + if package_name: + print("=" * 20, package_name, "=" * 20) + else: + print("=" * 20, "General", "=" * 20) + + for warning_no, error in enumerate(sorted(errors), 1): + print("-" * 20, f"Error {warning_no:3}", "-" * 20) + + _display_error(error) + + print("=" * 50) + print() + msg = """ +If the spelling is correct, add the spelling to docs/spelling_wordlist.txt +or use the spelling directive. +Check https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html#private-dictionaries +for more details. 
+ """ + print(msg) + print() + print("#" * 50) + + +def _display_error(error: SpellingError): + print(error.message) + print() + if error.file_path: + print(f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)}") + if error.spelling: + print(f"Incorrect Spelling: '{error.spelling}'") + if error.suggestion: + print(f"Suggested Spelling: '{error.suggestion}'") + if error.context_line: + print(f"Line with Error: '{error.context_line}'") + if error.line_no: + print(f"Line Number: {error.line_no}") + print(prepare_code_snippet(error.file_path, error.line_no)) diff --git a/docs/exts/operators_and_hooks_ref.py b/docs/exts/operators_and_hooks_ref.py index 04ee01ea871dd..fccc99197f055 100644 --- a/docs/exts/operators_and_hooks_ref.py +++ b/docs/exts/operators_and_hooks_ref.py @@ -67,11 +67,16 @@ def _docs_path(filepath: str): if not filepath.endswith(".rst"): raise Exception(f"The path must ends with '.rst'. Current value: {filepath}") + if filepath.startswith("/docs/apache-airflow-providers-"): + _, _, provider, rest = filepath.split("/", maxsplit=3) + filepath = f"{provider}:{rest}" + else: + filepath = os.path.join(ROOT_DIR, filepath.lstrip('/')) + filepath = os.path.relpath(filepath, DOCS_DIR) + len_rst = len(".rst") filepath = filepath[:-len_rst] - filepath = os.path.join(ROOT_DIR, filepath.lstrip('/')) - - return os.path.relpath(filepath, DOCS_DIR) + return filepath def _prepare_resource_index(package_data, resource_type): @@ -249,7 +254,7 @@ def setup(app): parser_b.set_defaults(cmd=CMD_TRANSFERS) args = parser.parse_args() - print(args) + if args.cmd == CMD_OPERATORS_AND_HOOKS: content = _render_operator_content( tags=set(args.tags) if args.tags else None, header_separator=args.header_separator diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py index 5e9b1ae25d333..130084cdf87ad 100644 --- a/docs/exts/provider_yaml_utils.py +++ b/docs/exts/provider_yaml_utils.py @@ -43,7 +43,7 @@ def get_provider_yaml_paths(): return 
sorted(glob(f"{ROOT_DIR}/airflow/providers/**/provider.yaml", recursive=True)) -def load_package_data() -> List[Dict[str, Dict]]: +def load_package_data() -> List[Dict[str, Any]]: """ Load all data from providers files @@ -59,5 +59,6 @@ def load_package_data() -> List[Dict[str, Dict]]: except jsonschema.ValidationError: raise Exception(f"Unable to parse: {provider_yaml_path}.") provider['python-module'] = _filepath_to_module(os.path.dirname(provider_yaml_path)) + provider['package-dir'] = os.path.dirname(provider_yaml_path) result.append(provider) return result diff --git a/docs/exts/redirects.py b/docs/exts/redirects.py index 8a06c42aa18b2..beca2cb1d2b54 100644 --- a/docs/exts/redirects.py +++ b/docs/exts/redirects.py @@ -35,9 +35,6 @@ def generate_redirects(app): in_suffix = next(iter(app.config.source_suffix.keys())) if not isinstance(app.builder, builders.StandaloneHTMLBuilder): - log.warning( - f"The plugin supports only 'html' builder, but you are using '{type(app.builder)}'. Skipping..." - ) return with open(redirect_file_path) as redirects: diff --git a/docs/exts/sphinx_script_update.py b/docs/exts/sphinx_script_update.py index 114287b89f9be..2fa3b527ae5bb 100644 --- a/docs/exts/sphinx_script_update.py +++ b/docs/exts/sphinx_script_update.py @@ -93,11 +93,6 @@ def fetch_and_cache(script_url: str, output_filename: str): def builder_inited(app): """Sphinx "builder-inited" event handler.""" - if not isinstance(app.builder, builders.StandaloneHTMLBuilder): - log.warning( - F"The plugin is support only 'html' builder, but you are using '{type(app.builder)}'. Skipping..." - ) - return script_url = app.config.redoc_script_url output_filename = "script.js" diff --git a/docs/howto/operator/index.rst b/docs/howto/operator/index.rst index 3015a5964582d..4d4b04a4275f2 100644 --- a/docs/howto/operator/index.rst +++ b/docs/howto/operator/index.rst @@ -36,7 +36,6 @@ determine what actually executes when your DAG runs. 
apache/index databricks dingding - google/index http jdbc kubernetes diff --git a/docs/howto/operator/kubernetes.rst b/docs/howto/operator/kubernetes.rst index a5569196541b5..4a07f67e5778b 100644 --- a/docs/howto/operator/kubernetes.rst +++ b/docs/howto/operator/kubernetes.rst @@ -36,7 +36,7 @@ you to create and run Pods on a Kubernetes cluster. simplifies the Kubernetes authorization process. .. note:: - The :doc:`Kubernetes executor <../../../executor/kubernetes>` is **not** required to use this operator. + The :doc:`Kubernetes executor ` is **not** required to use this operator. How does this operator work? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/list-roles.sh b/docs/list-roles.sh new file mode 100644 index 0000000000000..6f37dd438ed78 --- /dev/null +++ b/docs/list-roles.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+set -euo pipefail + +DOCS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +readonly DOCS_DIR + +cd "$DOCS_DIR" + +PYTHONPATH="$PWD" python3 exts/airflow_intersphinx.py "$@" diff --git a/docs/logging-monitoring/logging-tasks.rst b/docs/logging-monitoring/logging-tasks.rst index 7ec6765c39928..aaec284b0a69a 100644 --- a/docs/logging-monitoring/logging-tasks.rst +++ b/docs/logging-monitoring/logging-tasks.rst @@ -195,47 +195,6 @@ Follow the steps below to enable Azure Blob Storage logging: #. Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution. #. Verify that logs are showing up for newly executed tasks in the bucket you've defined. -.. _write-logs-gcp: - -Writing Logs to Google Cloud Storage ------------------------------------- - -Remote logging to Google Cloud Storage uses an existing Airflow connection to read or write logs. If you -don't have a connection properly setup, this process will fail. - -Follow the steps below to enable Google Cloud Storage logging. - -To enable this feature, ``airflow.cfg`` must be configured as in this -example: - -.. code-block:: ini - - [logging] - # Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. - # Users must supply an Airflow connection id that provides access to the storage - # location. If remote_logging is set to true, see UPDATING.md for additional - # configuration requirements. - remote_logging = True - remote_base_log_folder = gs://my-bucket/path/to/logs - -#. By default Application Default Credentials are used to obtain credentials. You can also - set ``google_key_path`` option in ``[logging]`` section, if you want to use your own service account. -#. Make sure a Google Cloud account have read and write access to the Google Cloud Storage bucket defined above in ``remote_base_log_folder``. -#. Install the ``google`` package, like so: ``pip install 'apache-airflow[google]'``. -#. 
Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution. -#. Verify that logs are showing up for newly executed tasks in the bucket you've defined. -#. Verify that the Google Cloud Storage viewer is working in the UI. Pull up a newly executed task, and verify that you see something like: - -.. code-block:: none - - *** Reading remote log from gs:///example_bash_operator/run_this_last/2017-10-03T00:00:00/16.log. - [2017-10-03 21:57:50,056] {cli.py:377} INFO - Running on host chrisr-00532 - [2017-10-03 21:57:50,093] {base_task_runner.py:115} INFO - Running: ['bash', '-c', 'airflow tasks run example_bash_operator run_this_last 2017-10-03T00:00:00 --job-id 47 --raw -S DAGS_FOLDER/example_dags/example_bash_operator.py'] - [2017-10-03 21:57:51,264] {base_task_runner.py:98} INFO - Subtask: [2017-10-03 21:57:51,263] {__init__.py:45} INFO - Using executor SequentialExecutor - [2017-10-03 21:57:51,306] {base_task_runner.py:98} INFO - Subtask: [2017-10-03 21:57:51,306] {models.py:186} INFO - Filling up the DagBag from /airflow/dags/example_dags/example_bash_operator.py - -**Note** that the path to the remote log file is listed on the first line. - .. _write-logs-elasticsearch: Writing Logs to Elasticsearch @@ -306,45 +265,6 @@ cert, etc.) use the ``elasticsearch_configs`` setting in your ``airflow.cfg`` ca_certs=/path/to/CA_certs -.. _write-logs-stackdriver: - -Writing Logs to Google Stackdriver ----------------------------------- - -Airflow can be configured to read and write task logs in `Google Stackdriver Logging `__. - -To enable this feature, ``airflow.cfg`` must be configured as in this -example: - -.. code-block:: ini - - [logging] - # Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search. - # Users must supply an Airflow connection id that provides access to the storage - # location. If remote_logging is set to true, see UPDATING.md for additional - # configuration requirements. 
- remote_logging = True - remote_base_log_folder = stackdriver://logs-name - -All configuration options are in the ``[logging]`` section. - -The value of field ``remote_logging`` must always be set to ``True`` for this feature to work. -Turning this option off will result in data not being sent to Stackdriver. -The ``remote_base_log_folder`` option contains the URL that specifies the type of handler to be used. -For integration with Stackdriver, this option should start with ``stackdriver:///``. -The path section of the URL specifies the name of the log e.g. ``stackdriver://airflow-tasks`` writes -logs under the name ``airflow-tasks``. - -You can set ``google_key_path`` option in the ``[logging]`` section to specify the path to `the service -account key file `__. -If omitted, authorization based on `the Application Default Credentials -`__ will -be used. - -By using the ``logging_config_class`` option you can get :ref:`advanced features ` of -this handler. Details are available in the handler's documentation - -:class:`~airflow.providers.google.cloud.log.stackdriver_task_handler.StackdriverTaskHandler`. - External Links -------------- @@ -368,10 +288,3 @@ To enable it, ``airflow.cfg`` must be configured as in the example below. Note t # Code will construct log_id using the log_id template from the argument above. # NOTE: The code will prefix the https:// automatically, don't include that here. frontend = /{log_id} - -.. _log-link-stackdriver: - -Google Stackdriver External Link -'''''''''''''''''''''''''''''''' - -Airflow automatically shows a link to Google Stackdriver when configured to use it as the remote logging system. diff --git a/docs/operators-and-hooks-ref.rst b/docs/operators-and-hooks-ref.rst index 5ab79288a4196..d735ee7c115fb 100644 --- a/docs/operators-and-hooks-ref.rst +++ b/docs/operators-and-hooks-ref.rst @@ -228,7 +228,7 @@ Airflow has support for the `Google service `__. 
All hooks are based on :class:`airflow.providers.google.common.hooks.base_google.GoogleBaseHook`. Some integration also use :mod:`airflow.providers.google.common.hooks.discovery_api`. -See the :doc:`Google Cloud connection type ` documentation to +See the :doc:`Google Cloud connection type ` documentation to configure connections to Google services. .. _GCP: diff --git a/docs/security/api.rst b/docs/security/api.rst index 59da90c84bd22..73671159bc4c4 100644 --- a/docs/security/api.rst +++ b/docs/security/api.rst @@ -77,59 +77,6 @@ To enable Kerberos authentication, set the following in the configuration: The Kerberos service is configured as ``airflow/fully.qualified.domainname@REALM``. Make sure this principal exists in the keytab file. -Google OpenID authentication -'''''''''''''''''''''''''''' - -You can also configure -`Google OpenID `__ -for authentication. To enable it, set the following option in the configuration: - -.. code-block:: ini - - [api] - auth_backend = airflow.providers.google.common.auth_backend.google_openid - -It is also highly recommended to configure an OAuth2 audience so that the generated tokens are restricted to -use by Airflow only. - -.. code-block:: ini - - [api] - google_oauth2_audience = project-id-random-value.apps.googleusercontent.com - -You can also configure the CLI to send request to a remote API instead of making a query to a local database. - -.. code-block:: ini - - [cli] - api_client = airflow.api.client.json_client - endpoint_url = http://remote-host.example.org/ - -You can also set up a service account key. If omitted, authorization based on `the Application Default -Credentials `__ -will be used. - -.. code-block:: ini - - [cli] - google_key_path = - -You can get the authorization token with the ``gcloud auth print-identity-token`` command. An example request -look like the following. - - .. 
code-block:: bash - - ENDPOINT_URL="http://locahost:8080/" - - AUDIENCE="project-id-random-value.apps.googleusercontent.com" - ID_TOKEN="$(gcloud auth print-identity-token "--audience=${AUDIENCE}")" - - curl -X GET \ - "${ENDPOINT_URL}/api/experimental/pools" \ - -H 'Content-Type: application/json' \ - -H 'Cache-Control: no-cache' \ - -H "Authorization: Bearer ${ID_TOKEN}" - Basic authentication '''''''''''''''''''' diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 6e6ec2d55d696..f3a2c7d876d5a 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -1004,6 +1004,7 @@ orchestrator orgtbl os ot +optionality overridable oversubscription pagerduty @@ -1311,6 +1312,7 @@ unittests unix unpause unpausing +unpredicted unqueued unterminated unutilized diff --git a/docs/start_doc_server.sh b/docs/start_doc_server.sh index 1368aad12b6bc..28397c59ff524 100755 --- a/docs/start_doc_server.sh +++ b/docs/start_doc_server.sh @@ -19,6 +19,6 @@ DOCS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" readonly DOCS_DIR -(cd "${DOCS_DIR}"/_build/html || exit; +(cd "${DOCS_DIR}"/_build || exit; python -m http.server 8000 ) diff --git a/docs/templates/layout.html b/docs/templates/layout.html deleted file mode 100644 index 4884a1f60e68f..0000000000000 --- a/docs/templates/layout.html +++ /dev/null @@ -1,37 +0,0 @@ -{# - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. -#} - -{# -JavaScript to render AIRFLOW-XXX and PR references in text -as HTML links. - -Overrides extrahead block from sphinx_rtd_theme -https://www.sphinx-doc.org/en/master/templating.html -#} - -{% extends "!layout.html" %} - -{% block extrahead %} - {{ super() }} - - -{% endblock %} diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py index fd748016119ac..b01717fb75085 100755 --- a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py +++ b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py @@ -251,6 +251,12 @@ def check_doc_files(yaml_files: Dict[str, Dict]): for f in glob(f"{DOCS_DIR}/howto/operator/**/*.rst", recursive=True) if not f.endswith("/index.rst") and '/_partials' not in f } + expected_doc_urls |= { + "/docs/" + os.path.relpath(f, start=DOCS_DIR) + for f in glob(f"{DOCS_DIR}//apache-airflow-providers-*/operators/**/*.rst", recursive=True) + if not f.endswith("/index.rst") and '/_partials' not in f + } + expected_doc_urls -= DOC_FILES_EXCLUDE_LIST try: assert_sets_equal(set(expected_doc_urls), set(current_doc_urls)) diff --git a/scripts/in_container/run_docs_build.sh b/scripts/in_container/run_docs_build.sh index ef3f986810988..bcf94b9c8007e 100755 --- a/scripts/in_container/run_docs_build.sh +++ b/scripts/in_container/run_docs_build.sh @@ -18,12 +18,9 @@ # shellcheck source=scripts/in_container/_in_container_script_init.sh . 
"$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" -sudo rm -rf "${AIRFLOW_SOURCES}/docs/_build/*" -sudo rm -rf "${AIRFLOW_SOURCES}/docs/_api/*" - sudo -E "${AIRFLOW_SOURCES}/docs/build_docs.py" "${@}" -if [[ ${CI:="false"} == "true" && -d "${AIRFLOW_SOURCES}/docs/_build/html" ]]; then +if [[ ${CI:="false"} == "true" && -d "${AIRFLOW_SOURCES}/docs/_build/docs/" ]]; then rm -rf "/files/documentation" - cp -r "${AIRFLOW_SOURCES}/docs/_build/html" "/files/documentation" + cp -r "${AIRFLOW_SOURCES}/docs/_build" "/files/documentation" fi diff --git a/tests/www/test_views.py b/tests/www/test_views.py index 87244a9c85a73..9208f328a2590 100644 --- a/tests/www/test_views.py +++ b/tests/www/test_views.py @@ -521,7 +521,9 @@ def test_index(self): def test_doc_urls(self): resp = self.client.get('/', follow_redirects=True) if "dev" in version.version: - airflow_doc_site = "https://airflow.readthedocs.io/en/latest" + airflow_doc_site = ( + "http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com/docs/apache-airflow/" + ) else: airflow_doc_site = f'https://airflow.apache.org/docs/{version.version}'