repo
stringlengths 2
99
| file
stringlengths 13
225
| code
stringlengths 0
18.3M
| file_length
int64 0
18.3M
| avg_line_length
float64 0
1.36M
| max_line_length
int64 0
4.26M
| extension_type
stringclasses 1
value |
---|---|---|---|---|---|---|
airflow | airflow-main/airflow/providers/google/go_module_utils.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utilities initializing and managing Go modules."""
from __future__ import annotations
import os
from airflow.utils.process_utils import execute_in_subprocess
def init_module(go_module_name: str, go_module_path: str) -> None:
    """Initialize a Go module.

    If a ``go.mod`` file already exists, this function will do nothing.

    :param go_module_name: The name of the Go module to initialize.
    :param go_module_path: The path to the directory containing the Go module.
    """
    go_mod_file = os.path.join(go_module_path, "go.mod")
    # Module already initialized -- leave it untouched.
    if os.path.isfile(go_mod_file):
        return
    execute_in_subprocess(["go", "mod", "init", go_module_name], cwd=go_module_path)
def install_dependencies(go_module_path: str) -> None:
    """Install dependencies for a Go module.

    :param go_module_path: The path to the directory containing the Go module.
    """
    # ``go mod tidy`` resolves and downloads everything the module requires.
    execute_in_subprocess(["go", "mod", "tidy"], cwd=go_module_path)
| 1,769 | 36.659574 | 78 | py |
airflow | airflow-main/airflow/providers/google/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "10.4.0"
# Resolve the version of the Airflow installation this provider was imported
# into; Airflow 2.x exposes it at the package root, while the fallback import
# path covers layouts where only ``airflow.version`` is available.
try:
    from airflow import __version__ as airflow_version
except ImportError:
    from airflow.version import version as airflow_version
# Fail fast at import time when the hosting Airflow is older than the minimum
# this provider package supports.
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
    raise RuntimeError(
        f"The package `apache-airflow-providers-google:{__version__}` requires Apache Airflow 2.4.0+"  # NOQA: E501
    )
| 1,532 | 35.5 | 115 | py |
airflow | airflow-main/airflow/providers/google/ads/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/enums/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/enums/types/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/errors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/errors/types/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/common/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/common/types/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/services/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/services/services/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/services/services/customer_service/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/services/services/customer_service/transports/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/services/services/google_ads_service/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/services/services/google_ads_service/transports/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/services/types/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/resources/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/v12/resources/types/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/_vendor/googleads/interceptors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/transfers/ads_to_gcs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import csv
from operator import attrgetter
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.google.ads.hooks.ads import GoogleAdsHook
from airflow.providers.google.cloud.hooks.gcs import GCSHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleAdsToGcsOperator(BaseOperator):
    """Export Google Ads API query results for 1-n client accounts to GCS.

    Runs the supplied GAQL query against every client ID, extracts the
    requested row attributes, writes them to a temporary CSV file, and uploads
    that file to Google Cloud Storage.

    .. seealso::
        For more information on the Google Ads API, take a look at the API docs:
        https://developers.google.com/google-ads/api/docs/start

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleAdsToGcsOperator`

    :param client_ids: Google Ads client IDs to query
    :param query: Google Ads Query Language API query
    :param attributes: List of Google Ads Row attributes to extract
    :param bucket: The GCS bucket to upload to
    :param obj: GCS path to save the object. Must be the full file path (ex. `path/to/file.txt`)
    :param gcp_conn_id: Airflow Google Cloud connection ID
    :param google_ads_conn_id: Airflow Google Ads connection ID
    :param page_size: The number of results per API page request. Max 10,000
    :param gzip: Option to compress local file or file data for upload
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param api_version: Optional Google Ads API version to use.
    """

    template_fields: Sequence[str] = (
        "client_ids",
        "query",
        "attributes",
        "bucket",
        "obj",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        client_ids: list[str],
        query: str,
        attributes: list[str],
        bucket: str,
        obj: str,
        gcp_conn_id: str = "google_cloud_default",
        google_ads_conn_id: str = "google_ads_default",
        page_size: int = 10000,
        gzip: bool = False,
        impersonation_chain: str | Sequence[str] | None = None,
        api_version: str | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.client_ids = client_ids
        self.query = query
        self.attributes = attributes
        self.bucket = bucket
        self.obj = obj
        self.gcp_conn_id = gcp_conn_id
        self.google_ads_conn_id = google_ads_conn_id
        self.page_size = page_size
        self.gzip = gzip
        self.impersonation_chain = impersonation_chain
        self.api_version = api_version

    def execute(self, context: Context) -> None:
        ads_hook = GoogleAdsHook(
            gcp_conn_id=self.gcp_conn_id,
            google_ads_conn_id=self.google_ads_conn_id,
            api_version=self.api_version,
        )
        fetched_rows = ads_hook.search(
            client_ids=self.client_ids, query=self.query, page_size=self.page_size
        )
        try:
            # Pull the configured attributes out of every returned GoogleAdsRow.
            extract = attrgetter(*self.attributes)
            records = [extract(row) for row in fetched_rows]
        except Exception as e:
            self.log.error("An error occurred in converting the Google Ad Rows. \n Error %s", e)
            raise
        with NamedTemporaryFile("w", suffix=".csv") as tmp_file:
            # Stage the extracted records as CSV on local disk, then ship to GCS.
            csv.writer(tmp_file).writerows(records)
            tmp_file.flush()

            gcs_hook = GCSHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
            gcs_hook.upload(
                bucket_name=self.bucket,
                object_name=self.obj,
                filename=tmp_file.name,
                gzip=self.gzip,
            )
            self.log.info("%s uploaded to GCS", self.obj)
| 5,220 | 38.55303 | 102 | py |
airflow | airflow-main/airflow/providers/google/ads/transfers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/operators/ads.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Ad to GCS operators."""
from __future__ import annotations
import csv
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.google.ads.hooks.ads import GoogleAdsHook
from airflow.providers.google.cloud.hooks.gcs import GCSHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleAdsListAccountsOperator(BaseOperator):
    """
    Saves list of customers on GCS in form of a csv file.

    The resulting list of customers is based on your OAuth credentials. The request returns a list
    of all accounts that you are able to act upon directly given your current credentials. This will
    not necessarily include all accounts within the account hierarchy; rather, it will only include
    accounts where your authenticated user has been added with admin or other rights in the account.

    .. seealso::
        https://developers.google.com/google-ads/api/reference/rpc

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleAdsListAccountsOperator`

    :param bucket: The GCS bucket to upload to
    :param object_name: GCS path to save the csv file. Must be the full file path (ex. `path/to/file.csv`)
    :param gcp_conn_id: Airflow Google Cloud connection ID
    :param google_ads_conn_id: Airflow Google Ads connection ID
    :param gzip: Option to compress local file or file data for upload
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param api_version: Optional Google Ads API version to use.
    """

    template_fields: Sequence[str] = (
        "bucket",
        "object_name",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        bucket: str,
        object_name: str,
        gcp_conn_id: str = "google_cloud_default",
        google_ads_conn_id: str = "google_ads_default",
        gzip: bool = False,
        impersonation_chain: str | Sequence[str] | None = None,
        api_version: str | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.bucket = bucket
        self.object_name = object_name
        self.gcp_conn_id = gcp_conn_id
        self.google_ads_conn_id = google_ads_conn_id
        self.gzip = gzip
        self.impersonation_chain = impersonation_chain
        self.api_version = api_version

    def execute(self, context: Context) -> str:
        uri = f"gs://{self.bucket}/{self.object_name}"
        ads_hook = GoogleAdsHook(
            gcp_conn_id=self.gcp_conn_id,
            google_ads_conn_id=self.google_ads_conn_id,
            api_version=self.api_version,
        )
        gcs_hook = GCSHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
        with NamedTemporaryFile("w+") as tmp_file:
            # Fetch every customer account reachable with the current credentials
            # and stage the result as a local CSV file.
            accessible = ads_hook.list_accessible_customers()
            csv.writer(tmp_file).writerows(accessible)
            tmp_file.flush()

            # Ship the staged CSV to GCS.
            gcs_hook.upload(
                bucket_name=self.bucket, object_name=self.object_name, gzip=self.gzip, filename=tmp_file.name
            )
            self.log.info("Uploaded %s to %s", len(accessible), uri)
        return uri
| 4,743 | 39.547009 | 110 | py |
airflow | airflow-main/airflow/providers/google/ads/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/ads/hooks/ads.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Ad hook."""
from __future__ import annotations
from functools import cached_property
from tempfile import NamedTemporaryFile
from typing import IO, Any
from google.ads.googleads.client import GoogleAdsClient
from google.ads.googleads.errors import GoogleAdsException
from google.ads.googleads.v14.services.services.customer_service import CustomerServiceClient
from google.ads.googleads.v14.services.services.google_ads_service import GoogleAdsServiceClient
from google.ads.googleads.v14.services.types.google_ads_service import GoogleAdsRow
from google.api_core.page_iterator import GRPCIterator
from google.auth.exceptions import GoogleAuthError
from airflow import AirflowException
from airflow.hooks.base import BaseHook
from airflow.providers.google.common.hooks.base_google import get_field
class GoogleAdsHook(BaseHook):
    """Interact with Google Ads API.

    This hook requires two connections:

        - gcp_conn_id - provides service account details (like any other GCP connection)
        - google_ads_conn_id - which contains information from Google Ads config.yaml file
          in the ``extras``. Example of the ``extras``:

        .. code-block:: json

            {
                "google_ads_client": {
                    "developer_token": "{{ INSERT_TOKEN }}",
                    "json_key_file_path": null,
                    "impersonated_email": "{{ INSERT_IMPERSONATED_EMAIL }}"
                }
            }

        The ``json_key_file_path`` is resolved by the hook using credentials from gcp_conn_id.
        https://developers.google.com/google-ads/api/docs/client-libs/python/oauth-service

    .. seealso::
        For more information on how Google Ads authentication flow works take a look at:
        https://developers.google.com/google-ads/api/docs/client-libs/python/oauth-service

    .. seealso::
        For more information on the Google Ads API, take a look at the API docs:
        https://developers.google.com/google-ads/api/docs/start

    :param gcp_conn_id: The connection ID with the service account details.
    :param google_ads_conn_id: The connection ID with the details of Google Ads config.yaml file.
    :param api_version: The Google Ads API version to use.
    """

    default_api_version = "v14"

    def __init__(
        self,
        api_version: str | None,
        gcp_conn_id: str = "google_cloud_default",
        google_ads_conn_id: str = "google_ads_default",
    ) -> None:
        super().__init__()
        self.api_version = api_version or self.default_api_version
        self.gcp_conn_id = gcp_conn_id
        self.google_ads_conn_id = google_ads_conn_id
        # Populated lazily by _get_config() from the google_ads_conn_id connection extras.
        self.google_ads_config: dict[str, Any] = {}

    def search(
        self, client_ids: list[str], query: str, page_size: int = 10000, **kwargs
    ) -> list[GoogleAdsRow]:
        """Pull data from the Google Ads API.

        Native protobuf message instances are returned (those seen in versions
        prior to 10.0.0 of the google-ads library).

        This method is for backwards compatibility with older versions of the
        google_ads_hook.

        Check out the search_proto_plus method to get API results in the new
        default format of the google-ads library since v10.0.0 that behave
        more like conventional python object (using proto-plus-python).

        :param client_ids: Google Ads client ID(s) to query the API for.
        :param query: Google Ads Query Language query.
        :param page_size: Number of results to return per page. Max 10000.
        :return: Google Ads API response, converted to Google Ads Row objects.
        """
        data_proto_plus = self._search(client_ids, query, page_size, **kwargs)
        # Unwrap the proto-plus wrappers into the underlying native protobuf messages.
        data_native_pb = [row._pb for row in data_proto_plus]
        return data_native_pb

    def search_proto_plus(
        self, client_ids: list[str], query: str, page_size: int = 10000, **kwargs
    ) -> list[GoogleAdsRow]:
        """Pull data from the Google Ads API.

        Instances of proto-plus-python message are returned, which behave more
        like conventional Python objects.

        :param client_ids: Google Ads client ID(s) to query the API for.
        :param query: Google Ads Query Language query.
        :param page_size: Number of results to return per page. Max 10000.
        :return: Google Ads API response, converted to Google Ads Row objects
        """
        return self._search(client_ids, query, page_size, **kwargs)

    def list_accessible_customers(self) -> list[str]:
        """List resource names of customers.

        The resulting list of customers is based on your OAuth credentials. The
        request returns a list of all accounts that you are able to act upon
        directly given your current credentials. This will not necessarily
        include all accounts within the account hierarchy; rather, it will only
        include accounts where your authenticated user has been added with admin
        or other rights in the account.

        .. seealso::
            https://developers.google.com/google-ads/api/reference/rpc

        :return: List of names of customers
        """
        try:
            accessible_customers = self._get_customer_service.list_accessible_customers()
            return accessible_customers.resource_names
        except GoogleAdsException as ex:
            for error in ex.failure.errors:
                self.log.error('\tError with message "%s".', error.message)
                if error.location:
                    for field_path_element in error.location.field_path_elements:
                        self.log.error("\t\tOn field: %s", field_path_element.field_name)
            raise

    @cached_property
    def _get_service(self) -> GoogleAdsServiceClient:
        """Connect and authenticate with the Google Ads API using a service account."""
        client = self._get_client
        return client.get_service("GoogleAdsService", version=self.api_version)

    @cached_property
    def _get_client(self) -> GoogleAdsClient:
        """Build (and cache) an authenticated GoogleAdsClient from the connection config."""
        with NamedTemporaryFile("w", suffix=".json") as secrets_temp:
            self._get_config()
            self._update_config_with_secret(secrets_temp)
            try:
                client = GoogleAdsClient.load_from_dict(self.google_ads_config)
                return client
            except GoogleAuthError as e:
                self.log.error("Google Auth Error: %s", e)
                raise

    @cached_property
    def _get_customer_service(self) -> CustomerServiceClient:
        """Connect and authenticate with the Google Ads API using a service account."""
        # Reuse the cached client instead of duplicating the config/secret setup
        # that _get_client already performs (mirrors _get_service above).
        client = self._get_client
        return client.get_service("CustomerService", version=self.api_version)

    def _get_config(self) -> None:
        """Set up Google Ads config from Connection.

        This pulls the connections from db, and uses it to set up
        ``google_ads_config``.
        """
        conn = self.get_connection(self.google_ads_conn_id)
        if "google_ads_client" not in conn.extra_dejson:
            raise AirflowException("google_ads_client not found in extra field")
        self.google_ads_config = conn.extra_dejson["google_ads_client"]

    def _update_config_with_secret(self, secrets_temp: IO[str]) -> None:
        """Set up Google Cloud config secret from Connection.

        This pulls the connection, saves the contents to a temp file, and point
        the config to the path containing the secret. Note that the secret must
        be passed as a file path for Google Ads API.
        """
        extras = self.get_connection(self.gcp_conn_id).extra_dejson
        secret = get_field(extras, "keyfile_dict")
        if not secret:
            raise KeyError("secret_conn.extra_dejson does not contain keyfile_dict")
        secrets_temp.write(secret)
        secrets_temp.flush()
        self.google_ads_config["json_key_file_path"] = secrets_temp.name

    def _search(
        self, client_ids: list[str], query: str, page_size: int = 10000, **kwargs
    ) -> list[GoogleAdsRow]:
        """Pull data from the Google Ads API.

        :param client_ids: Google Ads client ID(s) to query the API for.
        :param query: Google Ads Query Language query.
        :param page_size: Number of results to return per page. Max 10000.
        :return: Google Ads API response, converted to Google Ads Row objects
        """
        service = self._get_service
        iterators = []
        for client_id in client_ids:
            iterator = service.search(request=dict(customer_id=client_id, query=query, page_size=page_size))
            iterators.append(iterator)
        self.log.info("Fetched Google Ads Iterators")
        return self._extract_rows(iterators)

    def _extract_rows(self, iterators: list[GRPCIterator]) -> list[GoogleAdsRow]:
        """Convert Google Page Iterator (GRPCIterator) objects to Google Ads Rows.

        :param iterators: List of Google Page Iterator (GRPCIterator) objects
        :return: API response for all clients in the form of Google Ads Row object(s)
        """
        try:
            self.log.info("Extracting data from returned Google Ads Iterators")
            return [row for iterator in iterators for row in iterator]
        except GoogleAdsException as e:
            self.log.error(
                "Request ID %s failed with status %s and includes the following errors:",
                e.request_id,
                e.error.code().name,
            )
            for error in e.failure.errors:
                self.log.error("\tError with message: %s.", error.message)
                if error.location:
                    for field_path_element in error.location.field_path_elements:
                        self.log.error("\t\tOn field: %s", field_path_element.field_name)
            raise
| 10,997 | 42.129412 | 108 | py |
airflow | airflow-main/airflow/providers/google/ads/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/common/consts.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from google.api_core.gapic_v1.client_info import ClientInfo
from airflow import version
# Shared constant naming the default completion callback method for deferrable operators.
GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME = "execute_complete"
# Client info attached to outgoing Google API calls so requests can be attributed to this Airflow version.
CLIENT_INFO = ClientInfo(client_library_version="airflow_v" + version.version)
| 1,050 | 39.423077 | 78 | py |
airflow | airflow-main/airflow/providers/google/common/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/common/links/storage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a link for GCS Storage assets."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
# Base URL of the Google Cloud console.
BASE_LINK = "https://console.cloud.google.com"
# Object-list view of a bucket/prefix; formatted with ``uri`` and ``project_id``.
GCS_STORAGE_LINK = BASE_LINK + "/storage/browser/{uri};tab=objects?project={project_id}"
# Detail view of a single GCS object; formatted with ``uri`` and ``project_id``.
GCS_FILE_DETAILS_LINK = BASE_LINK + "/storage/browser/_details/{uri};tab=live_object?project={project_id}"
if TYPE_CHECKING:
    from airflow.utils.context import Context
class StorageLink(BaseGoogleLink):
    """Helper class for constructing GCS Storage link."""
    name = "GCS Storage"
    key = "storage_conf"
    format_str = GCS_STORAGE_LINK
    @staticmethod
    def persist(context: Context, task_instance: BaseOperator, uri: str, project_id: str | None) -> None:
        # Push the link parameters to XCom so the UI extra-link can render the console URL.
        task_instance.xcom_push(
            context=context,
            key=StorageLink.key,
            value={"uri": uri, "project_id": project_id},
        )
class FileDetailsLink(BaseGoogleLink):
    """Extra-link helper pointing the Airflow UI at a single GCS object's detail page."""

    name = "GCS File Details"
    key = "file_details"
    format_str = GCS_FILE_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance: BaseOperator, uri: str, project_id: str | None):
        # Store the parameters needed to render the console link via XCom.
        link_params = {"uri": uri, "project_id": project_id}
        task_instance.xcom_push(context=context, key=FileDetailsLink.key, value=link_params)
| 2,260 | 34.888889 | 106 | py |
airflow | airflow-main/airflow/providers/google/common/links/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/common/hooks/base_google.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a Google Cloud API base hook."""
from __future__ import annotations
import functools
import json
import logging
import os
import tempfile
import warnings
from contextlib import ExitStack, contextmanager
from subprocess import check_output
from typing import Any, Callable, Generator, Sequence, TypeVar, cast
import google.auth
import google.auth.credentials
import google.oauth2.service_account
import google_auth_httplib2
import requests
import tenacity
from asgiref.sync import sync_to_async
from google.api_core.exceptions import Forbidden, ResourceExhausted, TooManyRequests
from google.api_core.gapic_v1.client_info import ClientInfo
from google.auth import _cloud_sdk, compute_engine
from google.auth.environment_vars import CLOUD_SDK_CONFIG_DIR, CREDENTIALS
from google.auth.exceptions import RefreshError
from google.auth.transport import _http_client
from googleapiclient import discovery
from googleapiclient.errors import HttpError
from googleapiclient.http import MediaIoBaseDownload, build_http, set_user_agent
from airflow import version
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
from airflow.providers.google.cloud.utils.credentials_provider import (
_get_scopes,
_get_target_principal_and_delegates,
get_credentials_and_project_id,
)
from airflow.providers.google.common.consts import CLIENT_INFO
from airflow.utils.process_utils import patch_environ
log = logging.getLogger(__name__)
# Constants used by the mechanism of repeating requests in reaction to exceeding the temporary quota.
# Error-detail substrings identifying a soft quota problem in
# ResourceExhausted / TooManyRequests errors (see is_soft_quota_exception).
INVALID_KEYS = [
    "DefaultRequestsPerMinutePerProject",
    "DefaultRequestsPerMinutePerUser",
    "RequestsPerMinutePerProject",
    "Resource has been exhausted (e.g. check quota).",
]
# Error reasons identifying a soft quota problem in Forbidden errors.
INVALID_REASONS = [
    "userRateLimitExceeded",
]
def is_soft_quota_exception(exception: Exception):
    """
    API for Google services does not have a standardized way to report quota violation errors.

    The function has been adapted by trial and error to the following services:

    * Google Translate
    * Google Vision
    * Google Text-to-Speech
    * Google Speech-to-Text
    * Google Natural Language
    * Google Video Intelligence
    """
    # Pick the marker list that matches the exception type; anything else is not a quota error.
    if isinstance(exception, Forbidden):
        markers = INVALID_REASONS
    elif isinstance(exception, (ResourceExhausted, TooManyRequests)):
        markers = INVALID_KEYS
    else:
        return False
    return any(marker in error.details() for error in exception.errors for marker in markers)
def is_operation_in_progress_exception(exception: Exception) -> bool:
    """
    Some calls return 429 (too many requests!) or 409 errors (Conflict) in case of operation in progress.

    * Google Cloud SQL
    """
    # Only discovery-API HttpError responses carry the status codes we care about.
    if not isinstance(exception, HttpError):
        return False
    return exception.resp.status in (429, 409)
class retry_if_temporary_quota(tenacity.retry_if_exception):
    """Retries if there was an exception for exceeding the temporary quota limit."""
    def __init__(self):
        super().__init__(is_soft_quota_exception)
class retry_if_operation_in_progress(tenacity.retry_if_exception):
    """Retries if there was an exception indicating an operation already in progress (HTTP 409/429)."""
    def __init__(self):
        super().__init__(is_operation_in_progress_exception)
# A fake project_id to use in functions decorated by fallback_to_default_project_id
# This allows the 'project_id' argument to be of type str instead of str | None,
# making it easier to type hint the function body without dealing with the None
# case that can never happen at runtime.
PROVIDE_PROJECT_ID: str = cast(str, None)
# Generic type variables used by the decorator helpers below:
# T preserves a wrapped callable's type, RT a wrapped function's return type.
T = TypeVar("T", bound=Callable)
RT = TypeVar("RT")
def get_field(extras: dict, field_name: str):
    """Get field from extra, first checking short name, then for backcompat we check for prefixed name."""
    if field_name.startswith("extra__"):
        # Callers must pass the bare field name, never an already-prefixed one.
        raise ValueError(
            f"Got prefixed name {field_name}; please remove the 'extra__google_cloud_platform__' prefix "
            "when using this method."
        )
    try:
        # A present short name wins, even when its value is empty/falsy.
        value = extras[field_name]
    except KeyError:
        value = extras.get(f"extra__google_cloud_platform__{field_name}")
    # Normalize all falsy values (empty string, None, ...) to None.
    return value or None
class GoogleBaseHook(BaseHook):
"""
A base hook for Google cloud-related hooks.
Google cloud has a shared REST API client that is built in the same way no matter
which service you use. This class helps construct and authorize the credentials
needed to then call googleapiclient.discovery.build() to actually discover and
build a client for a Google cloud service.
The class also contains some miscellaneous helper functions.
All hook derived from this base hook use the 'Google Cloud' connection
type. Three ways of authentication are supported:
Default credentials: Only the 'Project Id' is required. You'll need to
have set up default credentials, such as by the
``GOOGLE_APPLICATION_DEFAULT`` environment variable or from the metadata
server on Google Compute Engine.
JSON key file: Specify 'Project Id', 'Keyfile Path' and 'Scope'.
Legacy P12 key files are not supported.
JSON data provided in the UI: Specify 'Keyfile JSON'.
:param gcp_conn_id: The connection ID to use when fetching connection info.
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled. The usage of this parameter should be limited only to Google Workspace
(gsuite) and marketing platform operators and hooks. It is deprecated for usage by Google Cloud
and Firebase operators and hooks, as well as transfer operators in other providers that involve
Google cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account.
"""
conn_name_attr = "gcp_conn_id"
default_conn_name = "google_cloud_default"
conn_type = "google_cloud_platform"
hook_name = "Google Cloud"
    @staticmethod
    def get_connection_form_widgets() -> dict[str, Any]:
        """Returns connection widgets to add to connection form."""
        # Imported locally: flask/wtforms are only available in the webserver context.
        from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget
        from flask_babel import lazy_gettext
        from wtforms import IntegerField, PasswordField, StringField
        from wtforms.validators import NumberRange
        return {
            "project": StringField(lazy_gettext("Project Id"), widget=BS3TextFieldWidget()),
            "key_path": StringField(lazy_gettext("Keyfile Path"), widget=BS3TextFieldWidget()),
            # The keyfile JSON is a secret, hence the password widget.
            "keyfile_dict": PasswordField(lazy_gettext("Keyfile JSON"), widget=BS3PasswordFieldWidget()),
            "credential_config_file": StringField(
                lazy_gettext("Credential Configuration File"), widget=BS3TextFieldWidget()
            ),
            "scope": StringField(lazy_gettext("Scopes (comma separated)"), widget=BS3TextFieldWidget()),
            "key_secret_name": StringField(
                lazy_gettext("Keyfile Secret Name (in GCP Secret Manager)"), widget=BS3TextFieldWidget()
            ),
            "key_secret_project_id": StringField(
                lazy_gettext("Keyfile Secret Project Id (in GCP Secret Manager)"), widget=BS3TextFieldWidget()
            ),
            "num_retries": IntegerField(
                lazy_gettext("Number of Retries"),
                validators=[NumberRange(min=0)],
                widget=BS3TextFieldWidget(),
                default=5,
            ),
        }
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["host", "schema", "login", "password", "port", "extra"],
"relabeling": {},
}
    def __init__(
        self,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__()
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Connection extras are read eagerly at construction time.
        self.extras: dict = self.get_connection(self.gcp_conn_id).extra_dejson
        # Credentials/project are resolved lazily and memoized by get_credentials_and_project_id().
        self._cached_credentials: google.auth.credentials.Credentials | None = None
        self._cached_project_id: str | None = None
    def get_credentials_and_project_id(self) -> tuple[google.auth.credentials.Credentials, str | None]:
        """Returns the Credentials object for Google API and the associated project_id."""
        # Memoized: resolve credentials only once per hook instance.
        if self._cached_credentials is not None:
            return self._cached_credentials, self._cached_project_id
        key_path: str | None = self._get_field("key_path", None)
        try:
            # "keyfile_dict" may arrive either as an already-parsed dict or as a JSON string.
            keyfile_dict: str | dict[str, str] | None = self._get_field("keyfile_dict", None)
            keyfile_dict_json: dict[str, str] | None = None
            if keyfile_dict:
                if isinstance(keyfile_dict, dict):
                    keyfile_dict_json = keyfile_dict
                else:
                    keyfile_dict_json = json.loads(keyfile_dict)
        except json.decoder.JSONDecodeError:
            raise AirflowException("Invalid key JSON.")
        key_secret_name: str | None = self._get_field("key_secret_name", None)
        key_secret_project_id: str | None = self._get_field("key_secret_project_id", None)
        credential_config_file: str | None = self._get_field("credential_config_file", None)
        # Split the impersonation chain into the final principal and its delegates.
        target_principal, delegates = _get_target_principal_and_delegates(self.impersonation_chain)
        credentials, project_id = get_credentials_and_project_id(
            key_path=key_path,
            keyfile_dict=keyfile_dict_json,
            credential_config_file=credential_config_file,
            key_secret_name=key_secret_name,
            key_secret_project_id=key_secret_project_id,
            scopes=self.scopes,
            delegate_to=self.delegate_to,
            target_principal=target_principal,
            delegates=delegates,
        )
        # An explicit "project" in the connection extras overrides the credentials' project.
        overridden_project_id = self._get_field("project")
        if overridden_project_id:
            project_id = overridden_project_id
        self._cached_credentials = credentials
        self._cached_project_id = project_id
        return credentials, project_id
def get_credentials(self) -> google.auth.credentials.Credentials:
"""Returns the Credentials object for Google API."""
credentials, _ = self.get_credentials_and_project_id()
return credentials
def _get_access_token(self) -> str:
"""Returns a valid access token from Google API Credentials."""
credentials = self.get_credentials()
auth_req = google.auth.transport.requests.Request()
# credentials.token is None
# Need to refresh credentials to populate the token
credentials.refresh(auth_req)
return credentials.token
@functools.lru_cache(maxsize=None)
def _get_credentials_email(self) -> str:
"""
Returns the email address associated with the currently logged in account.
If a service account is used, it returns the service account.
If user authentication (e.g. gcloud auth) is used, it returns the e-mail account of that user.
"""
credentials = self.get_credentials()
if isinstance(credentials, compute_engine.Credentials):
try:
credentials.refresh(_http_client.Request())
except RefreshError as msg:
"""
If the Compute Engine metadata service can't be reached in this case the instance has not
credentials.
"""
self.log.debug(msg)
service_account_email = getattr(credentials, "service_account_email", None)
if service_account_email:
return service_account_email
http_authorized = self._authorize()
oauth2_client = discovery.build("oauth2", "v1", http=http_authorized, cache_discovery=False)
return oauth2_client.tokeninfo().execute()["email"]
def _authorize(self) -> google_auth_httplib2.AuthorizedHttp:
"""Returns an authorized HTTP object to be used to build a Google cloud service hook connection."""
credentials = self.get_credentials()
http = build_http()
http = set_user_agent(http, "airflow/" + version.version)
authed_http = google_auth_httplib2.AuthorizedHttp(credentials, http=http)
return authed_http
def _get_field(self, f: str, default: Any = None) -> Any:
"""
Fetches a field from extras, and returns it.
This is some Airflow magic. The google_cloud_platform hook type adds
custom UI elements to the hook page, which allow admins to specify
service_account, key_path, etc. They get formatted as shown below.
"""
return hasattr(self, "extras") and get_field(self.extras, f) or default
@property
def project_id(self) -> str | None:
"""
Returns project id.
:return: id of the project
"""
_, project_id = self.get_credentials_and_project_id()
return project_id
@property
def num_retries(self) -> int:
"""
Returns num_retries from Connection.
:return: the number of times each API request should be retried
"""
field_value = self._get_field("num_retries", default=5)
if field_value is None:
return 5
if isinstance(field_value, str) and field_value.strip() == "":
return 5
try:
return int(field_value)
except ValueError:
raise AirflowException(
f"The num_retries field should be a integer. "
f'Current value: "{field_value}" (type: {type(field_value)}). '
f"Please check the connection configuration."
)
    @property
    def client_info(self) -> ClientInfo:
        """
        Return client information used to generate a user-agent for API calls.

        It allows for better errors tracking.

        This object is only used by the google-cloud-* libraries that are built specifically for
        the Google Cloud. It is not supported by The Google APIs Python Client that use Discovery
        based APIs.
        """
        # Deprecated accessor: callers should import the module-level CLIENT_INFO constant instead.
        warnings.warn(
            "This method is deprecated, please use `airflow.providers.google.common.consts.CLIENT_INFO`.",
            AirflowProviderDeprecationWarning,
            stacklevel=2,
        )
        return CLIENT_INFO
@property
def scopes(self) -> Sequence[str]:
"""
Return OAuth 2.0 scopes.
:return: Returns the scope defined in the connection configuration, or the default scope
"""
scope_value: str | None = self._get_field("scope", None)
return _get_scopes(scope_value)
    @staticmethod
    def quota_retry(*args, **kwargs) -> Callable:
        """Provides a mechanism to repeat requests in response to exceeding a temporary quota limit."""
        def decorator(fun: Callable):
            # Sensible retry defaults; any caller-supplied kwargs override them.
            default_kwargs = {
                "wait": tenacity.wait_exponential(multiplier=1, max=100),
                "retry": retry_if_temporary_quota(),
                "before": tenacity.before_log(log, logging.DEBUG),
                "after": tenacity.after_log(log, logging.DEBUG),
            }
            default_kwargs.update(**kwargs)
            return tenacity.retry(*args, **default_kwargs)(fun)
        return decorator
    @staticmethod
    def operation_in_progress_retry(*args, **kwargs) -> Callable[[T], T]:
        """Provides a mechanism to repeat requests in response to operation in progress (HTTP 409) limit."""
        def decorator(fun: T):
            # Sensible retry defaults; any caller-supplied kwargs override them.
            default_kwargs = {
                "wait": tenacity.wait_exponential(multiplier=1, max=300),
                "retry": retry_if_operation_in_progress(),
                "before": tenacity.before_log(log, logging.DEBUG),
                "after": tenacity.after_log(log, logging.DEBUG),
            }
            default_kwargs.update(**kwargs)
            return cast(T, tenacity.retry(*args, **default_kwargs)(fun))
        return decorator
@staticmethod
def fallback_to_default_project_id(func: Callable[..., RT]) -> Callable[..., RT]:
"""
Decorator that provides fallback for Google Cloud project id.
If the project is None it will be replaced with the project_id from the
service account the Hook is authenticated with. Project id can be specified
either via project_id kwarg or via first parameter in positional args.
:param func: function to wrap
:return: result of the function call
"""
@functools.wraps(func)
def inner_wrapper(self: GoogleBaseHook, *args, **kwargs) -> RT:
if args:
raise AirflowException(
"You must use keyword arguments in this methods rather than positional"
)
if "project_id" in kwargs:
kwargs["project_id"] = kwargs["project_id"] or self.project_id
else:
kwargs["project_id"] = self.project_id
if not kwargs["project_id"]:
raise AirflowException(
"The project id must be passed either as "
"keyword project_id parameter or as project_id extra "
"in Google Cloud connection definition. Both are not set!"
)
return func(self, *args, **kwargs)
return inner_wrapper
@staticmethod
def provide_gcp_credential_file(func: T) -> T:
"""
Provides a Google Cloud credentials for Application Default Credentials (ADC) strategy support.
It is recommended to use ``provide_gcp_credential_file_as_context`` context
manager to limit the scope when authorization data is available. Using context
manager also makes it easier to use multiple connection in one function.
"""
@functools.wraps(func)
def wrapper(self: GoogleBaseHook, *args, **kwargs):
with self.provide_gcp_credential_file_as_context():
return func(self, *args, **kwargs)
return cast(T, wrapper)
    @contextmanager
    def provide_gcp_credential_file_as_context(self) -> Generator[str | None, None, None]:
        """
        Provides a Google Cloud credentials for Application Default Credentials (ADC) strategy support.

        See:
        `Application Default Credentials (ADC)
        strategy <https://cloud.google.com/docs/authentication/production>`__.

        It can be used to provide credentials for external programs (e.g. gcloud) that expect authorization
        file in ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable.

        Yields the path of the credentials file that was placed into the environment,
        or ``None`` when the default service account credentials are used.
        """
        key_path: str | None = self._get_field("key_path", None)
        keyfile_dict: str | dict[str, str] | None = self._get_field("keyfile_dict", None)
        if key_path and keyfile_dict:
            # Having both sources configured is ambiguous -- refuse to guess.
            raise AirflowException(
                "The `keyfile_dict` and `key_path` fields are mutually exclusive. "
                "Please provide only one value."
            )
        elif key_path:
            if key_path.endswith(".p12"):
                raise AirflowException("Legacy P12 key file are not supported, use a JSON key file.")
            # Point GOOGLE_APPLICATION_CREDENTIALS at the existing key file while inside the context.
            with patch_environ({CREDENTIALS: key_path}):
                yield key_path
        elif keyfile_dict:
            # Materialize the in-connection JSON into a temporary file so external
            # programs can read it; the file is removed when the context exits.
            with tempfile.NamedTemporaryFile(mode="w+t") as conf_file:
                if isinstance(keyfile_dict, dict):
                    keyfile_dict = json.dumps(keyfile_dict)
                conf_file.write(keyfile_dict)
                conf_file.flush()
                with patch_environ({CREDENTIALS: conf_file.name}):
                    yield conf_file.name
        else:
            # We will use the default service account credentials.
            yield None
    @contextmanager
    def provide_authorized_gcloud(self) -> Generator[None, None, None]:
        """
        Provides a separate gcloud configuration with current credentials.

        The gcloud tool allows you to login to Google Cloud only - ``gcloud auth login`` and
        for the needs of Application Default Credentials ``gcloud auth application-default login``.
        In our case, we want all commands to use only the credentials from ADC, so
        we need to configure the credentials in gcloud manually.
        """
        credentials_path = _cloud_sdk.get_application_default_credentials_path()
        project_id = self.project_id
        with ExitStack() as exit_stack:
            # Export this connection's credential file, then isolate gcloud state in
            # a throwaway CLOUDSDK config directory so we never touch the user's setup.
            exit_stack.enter_context(self.provide_gcp_credential_file_as_context())
            gcloud_config_tmp = exit_stack.enter_context(tempfile.TemporaryDirectory())
            exit_stack.enter_context(patch_environ({CLOUD_SDK_CONFIG_DIR: gcloud_config_tmp}))
            if CREDENTIALS in os.environ:
                # This solves most cases when we are logged in using the service key in Airflow.
                # Don't display stdout/stderr for security reason
                check_output(
                    [
                        "gcloud",
                        "auth",
                        "activate-service-account",
                        f"--key-file={os.environ[CREDENTIALS]}",
                    ]
                )
            elif os.path.exists(credentials_path):
                # If we are logged in by `gcloud auth application-default` then we need to log in manually.
                # This will make the `gcloud auth application-default` and `gcloud auth` credentials equals.
                with open(credentials_path) as creds_file:
                    creds_content = json.loads(creds_file.read())
                # Don't display stdout/stderr for security reason
                check_output(["gcloud", "config", "set", "auth/client_id", creds_content["client_id"]])
                # Don't display stdout/stderr for security reason
                check_output(
                    ["gcloud", "config", "set", "auth/client_secret", creds_content["client_secret"]]
                )
                # Don't display stdout/stderr for security reason
                check_output(
                    [
                        "gcloud",
                        "auth",
                        "activate-refresh-token",
                        creds_content["client_id"],
                        creds_content["refresh_token"],
                    ]
                )
            if project_id:
                # Don't display stdout/stderr for security reason
                check_output(["gcloud", "config", "set", "core/project", project_id])
            yield
@staticmethod
def download_content_from_request(file_handle, request: dict, chunk_size: int) -> None:
"""
Download media resources.
Note that the Python file object is compatible with io.Base and can be used with this class also.
:param file_handle: io.Base or file object. The stream in which to write the downloaded bytes.
:param request: googleapiclient.http.HttpRequest, the media request to perform in chunks.
:param chunk_size: int, File will be downloaded in chunks of this many bytes.
"""
downloader = MediaIoBaseDownload(file_handle, request, chunksize=chunk_size)
done = False
while done is False:
_, done = downloader.next_chunk()
file_handle.flush()
def test_connection(self):
"""Test the Google cloud connectivity from UI."""
status, message = False, ""
try:
token = self._get_access_token()
url = f"https://www.googleapis.com/oauth2/v3/tokeninfo?access_token={token}"
response = requests.post(url)
if response.status_code == 200:
status = True
message = "Connection successfully tested"
except Exception as e:
status = False
message = str(e)
return status, message
class GoogleBaseAsyncHook(BaseHook):
    """GoogleBaseAsyncHook inherits from BaseHook class, run on the trigger worker."""

    # Subclasses must set this to the synchronous hook class being wrapped.
    sync_hook_class: Any = None

    def __init__(self, **kwargs: Any):
        # Keep the kwargs so the sync hook can be constructed lazily in get_sync_hook().
        self._hook_kwargs = kwargs
        self._sync_hook = None

    async def get_sync_hook(self) -> Any:
        """Sync version of the Google Cloud Hook makes blocking calls in ``__init__``; don't inherit it."""
        if not self._sync_hook:
            # Build the sync hook off the event loop: its __init__ performs blocking calls.
            self._sync_hook = await sync_to_async(self.sync_hook_class)(**self._hook_kwargs)
        return self._sync_hook

    async def service_file_as_context(self) -> Any:
        """Return the sync hook's credential-file context manager, created off the event loop."""
        sync_hook = await self.get_sync_hook()
        return await sync_to_async(sync_hook.provide_gcp_credential_file_as_context)()
| 26,568 | 40.973144 | 110 | py |
airflow | airflow-main/airflow/providers/google/common/hooks/discovery_api.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module allows you to connect to the Google Discovery API Service and query it."""
from __future__ import annotations
from typing import Sequence
from googleapiclient.discovery import Resource, build
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleDiscoveryApiHook(GoogleBaseHook):
    """
    A hook to use the Google API Discovery Service.

    :param api_service_name: The name of the api service that is needed to get the data
        for example 'youtube'.
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account.
    """

    # Cached Discovery client; created lazily by get_conn().
    _conn: Resource | None = None

    def __init__(
        self,
        api_service_name: str,
        api_version: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            impersonation_chain=impersonation_chain,
        )
        self.api_service_name = api_service_name
        self.api_version = api_version

    def get_conn(self) -> Resource:
        """
        Creates an authenticated api client for the given api service name and credentials.

        :return: the authenticated api service.
        """
        self.log.info("Authenticating Google API Client")
        if not self._conn:
            http_authorized = self._authorize()
            self._conn = build(
                serviceName=self.api_service_name,
                version=self.api_version,
                http=http_authorized,
                cache_discovery=False,
            )
        return self._conn

    def query(self, endpoint: str, data: dict, paginate: bool = False, num_retries: int = 0) -> dict:
        """
        Creates a dynamic API call to any Google API registered in Google's API Client Library and queries it.

        :param endpoint: The client libraries path to the api call's executing method.
            For example: 'analyticsreporting.reports.batchGet'

            .. seealso:: https://developers.google.com/apis-explorer
                for more information on what methods are available.
        :param data: The data (endpoint params) needed for the specific request to given endpoint.
        :param paginate: If set to True, it will collect all pages of data.
        :param num_retries: Define the number of retries for the requests being made if it fails.
        :return: the API response from the passed endpoint.
        """
        google_api_conn_client = self.get_conn()
        api_response = self._call_api_request(google_api_conn_client, endpoint, data, paginate, num_retries)
        return api_response

    def _call_api_request(self, google_api_conn_client, endpoint, data, paginate, num_retries):
        # Drop the leading service segment (e.g. "analyticsreporting") and walk the rest.
        api_endpoint_parts = endpoint.split(".")
        google_api_endpoint_instance = self._build_api_request(
            google_api_conn_client, api_sub_functions=api_endpoint_parts[1:], api_endpoint_params=data
        )
        if paginate:
            return self._paginate_api(
                google_api_endpoint_instance, google_api_conn_client, api_endpoint_parts, num_retries
            )
        return google_api_endpoint_instance.execute(num_retries=num_retries)

    def _build_api_request(self, google_api_conn_client, api_sub_functions, api_endpoint_params):
        # Walk the dotted attribute chain; intermediate segments are called without
        # arguments, the final segment receives the endpoint parameters.
        for sub_function in api_sub_functions:
            google_api_conn_client = getattr(google_api_conn_client, sub_function)
            if sub_function != api_sub_functions[-1]:
                google_api_conn_client = google_api_conn_client()
            else:
                google_api_conn_client = google_api_conn_client(**api_endpoint_params)
        return google_api_conn_client

    def _paginate_api(
        self, google_api_endpoint_instance, google_api_conn_client, api_endpoint_parts, num_retries
    ):
        # Execute each page request and collect the responses until no next page remains.
        api_responses = []
        while google_api_endpoint_instance:
            api_response = google_api_endpoint_instance.execute(num_retries=num_retries)
            api_responses.append(api_response)
            google_api_endpoint_instance = self._build_next_api_request(
                google_api_conn_client, api_endpoint_parts[1:], google_api_endpoint_instance, api_response
            )
        return api_responses

    def _build_next_api_request(
        self, google_api_conn_client, api_sub_functions, api_endpoint_instance, api_response
    ):
        # Same walk as _build_api_request, but the final segment uses the client
        # library's "<method>_next" helper, which returns None when pagination ends.
        for sub_function in api_sub_functions:
            if sub_function != api_sub_functions[-1]:
                google_api_conn_client = getattr(google_api_conn_client, sub_function)
                google_api_conn_client = google_api_conn_client()
            else:
                google_api_conn_client = getattr(google_api_conn_client, sub_function + "_next")
                google_api_conn_client = google_api_conn_client(api_endpoint_instance, api_response)
        return google_api_conn_client
| 6,691 | 42.738562 | 110 | py |
airflow | airflow-main/airflow/providers/google/common/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/common/utils/id_token_credentials.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
You can execute this module to get ID Token.
python -m airflow.providers.google.common.utils.id_token_credentials_provider
To obtain info about this token, run the following commands:
ID_TOKEN="$(python -m airflow.providers.google.common.utils.id_token_credentials)"
curl "https://www.googleapis.com/oauth2/v3/tokeninfo?id_token=${ID_TOKEN}" -v
.. spelling:word-list::
RefreshError
"""
from __future__ import annotations
import json
import os
import google.auth.transport
import google.oauth2
from google.auth import credentials as google_auth_credentials, environment_vars, exceptions
from google.oauth2 import credentials as oauth2_credentials, service_account
# Valid types accepted for file-based credentials.
# They are taken from "google.auth._default" and since they are all "protected" and the imports might
# change any time and fail the whole Google provider functionality - we should inline them
_AUTHORIZED_USER_TYPE = "authorized_user"
_SERVICE_ACCOUNT_TYPE = "service_account"
_EXTERNAL_ACCOUNT_TYPE = "external_account"
_EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = "external_account_authorized_user"
_IMPERSONATED_SERVICE_ACCOUNT_TYPE = "impersonated_service_account"
_GDCH_SERVICE_ACCOUNT_TYPE = "gdch_service_account"
# Complete set of "type" values a credentials JSON file may carry; anything else
# is rejected by _load_credentials_from_file.
_VALID_TYPES = (
    _AUTHORIZED_USER_TYPE,
    _SERVICE_ACCOUNT_TYPE,
    _EXTERNAL_ACCOUNT_TYPE,
    _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE,
    _IMPERSONATED_SERVICE_ACCOUNT_TYPE,
    _GDCH_SERVICE_ACCOUNT_TYPE,
)
class IDTokenCredentialsAdapter(google_auth_credentials.Credentials):
    """Convert Credentials with ``openid`` scope to IDTokenCredentials."""

    def __init__(self, credentials: oauth2_credentials.Credentials):
        super().__init__()
        self.credentials = credentials
        # Expose the wrapped credentials' ID token as this object's bearer token.
        self.token = credentials.id_token

    @property
    def expired(self):
        # Delegate expiry to the wrapped OAuth2 credentials.
        return self.credentials.expired

    def refresh(self, request):
        """Refresh the wrapped credentials and re-capture the new ID token."""
        self.credentials.refresh(request)
        self.token = self.credentials.id_token
def _load_credentials_from_file(
    filename: str, target_audience: str | None
) -> google_auth_credentials.Credentials | None:
    """
    Loads credentials from a file.

    The credentials file must be a service account key or a stored authorized user credential.

    :param filename: The full path to the credentials file.
    :param target_audience: The intended audience of the ID token (used for service account keys).
    :return: Loaded credentials
    :raise google.auth.exceptions.DefaultCredentialsError: if the file is in the wrong format or is missing.
    """
    if not os.path.exists(filename):
        # Include the path in the message so the failure is actionable.
        raise exceptions.DefaultCredentialsError(f"File {filename} was not found.")
    with open(filename) as file_obj:
        try:
            info = json.load(file_obj)
        except json.JSONDecodeError:
            raise exceptions.DefaultCredentialsError(f"File {filename} is not a valid json file.")

    # The type key should indicate that the file is either a service account
    # credentials file or an authorized user credentials file.
    credential_type = info.get("type")
    if credential_type == _AUTHORIZED_USER_TYPE:
        current_credentials = oauth2_credentials.Credentials.from_authorized_user_info(
            info, scopes=["openid", "email"]
        )
        # Wrap so the ID token is exposed as the bearer token.
        return IDTokenCredentialsAdapter(credentials=current_credentials)
    elif credential_type == _SERVICE_ACCOUNT_TYPE:
        try:
            return service_account.IDTokenCredentials.from_service_account_info(
                info, target_audience=target_audience
            )
        except ValueError:
            raise exceptions.DefaultCredentialsError(
                f"Failed to load service account credentials from {filename}"
            )

    raise exceptions.DefaultCredentialsError(
        f"The file {filename} does not have a valid type. Type is {credential_type}, "
        f"expected one of {_VALID_TYPES}."
    )
def _get_explicit_environ_credentials(
    target_audience: str | None,
) -> google_auth_credentials.Credentials | None:
    """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment variable.

    :param target_audience: The intended audience of the ID token.
    :return: credentials loaded from the file the variable points at, or None when unset.
    """
    explicit_file = os.environ.get(environment_vars.CREDENTIALS)
    if explicit_file is None:
        return None
    # Reuse the value read above instead of looking up os.environ a second time.
    return _load_credentials_from_file(explicit_file, target_audience=target_audience)
def _get_gcloud_sdk_credentials(
    target_audience: str | None,
) -> google_auth_credentials.Credentials | None:
    """Load application-default credentials previously stored by the Cloud SDK, if present."""
    from google.auth import _cloud_sdk

    # Only attempt a load when the ADC file actually exists on disk.
    credentials_filename = _cloud_sdk.get_application_default_credentials_path()
    if os.path.isfile(credentials_filename):
        return _load_credentials_from_file(credentials_filename, target_audience)
    return None
def _get_gce_credentials(
    target_audience: str | None, request: google.auth.transport.Request | None = None
) -> google_auth_credentials.Credentials | None:
    """Gets credentials and project ID from the GCE Metadata Service.

    Returns None when not running on GCE (metadata server unreachable or the
    compute_engine helpers are unavailable).
    """
    # Ping requires a transport, but we want application default credentials
    # to require no arguments. So, we'll use the _http_client transport which
    # uses http.client. This is only acceptable because the metadata server
    # doesn't do SSL and never requires proxies.

    # While this library is normally bundled with compute_engine, there are
    # some cases where it's not available, so we tolerate ImportError.
    try:
        from google.auth import compute_engine
        from google.auth.compute_engine import _metadata
    except ImportError:
        return None
    from google.auth.transport import _http_client

    if request is None:
        request = _http_client.Request()

    # Only hand out GCE credentials when the metadata server answers the ping.
    if _metadata.ping(request=request):
        return compute_engine.IDTokenCredentials(
            request, target_audience, use_metadata_identity_endpoint=True
        )

    return None
def get_default_id_token_credentials(
    target_audience: str | None, request: google.auth.transport.Request = None
) -> google_auth_credentials.Credentials:
    """Gets the default ID Token credentials for the current environment.

    `Application Default Credentials`_ provides an easy way to obtain credentials to call Google APIs for
    server-to-server or local applications.

    .. _Application Default Credentials: https://developers.google.com\
        /identity/protocols/application-default-credentials

    :param target_audience: The intended audience for these credentials.
    :param request: An object used to make HTTP requests. This is used to detect whether the application
        is running on Compute Engine. If not specified, then it will use the standard library http client
        to make requests.
    :return: the current environment's credentials.
    :raises ~google.auth.exceptions.DefaultCredentialsError:
        If no credentials were found, or if the credentials found were invalid.
    """
    # Probe each credential source in precedence order and stop at the first hit:
    # explicit env var, then gcloud SDK file, then the GCE metadata server.
    found = _get_explicit_environ_credentials(target_audience)
    if found is None:
        found = _get_gcloud_sdk_credentials(target_audience)
    if found is None:
        found = _get_gce_credentials(target_audience, request)
    if found is not None:
        return found

    raise exceptions.DefaultCredentialsError(
        f"""Could not automatically determine credentials. Please set {environment_vars.CREDENTIALS} or
explicitly create credentials and re-run the application. For more information, please see
https://cloud.google.com/docs/authentication/getting-started
""".strip()
    )
if __name__ == "__main__":
    from google.auth.transport import requests

    # Obtain ADC-based ID token credentials, refresh them to mint a token, and
    # print it so it can be captured by the shell (see module docstring).
    request_adapter = requests.Request()

    creds = get_default_id_token_credentials(target_audience=None)
    creds.refresh(request=request_adapter)
    print(creds.token)
| 8,868 | 36.901709 | 109 | py |
airflow | airflow-main/airflow/providers/google/common/utils/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/common/auth_backend/google_openid.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Authentication backend that use Google credentials for authorization."""
from __future__ import annotations
import logging
from functools import wraps
from typing import Callable, TypeVar, cast
import google
import google.auth.transport.requests
import google.oauth2.id_token
from flask import Response, current_app, request as flask_request # type: ignore
from google.auth import exceptions
from google.auth.transport.requests import AuthorizedSession
from google.oauth2 import service_account
from airflow.configuration import conf
from airflow.providers.google.common.utils.id_token_credentials import get_default_id_token_credentials
log = logging.getLogger(__name__)

# Issuer values Google uses in its ID tokens; any other issuer is rejected.
_GOOGLE_ISSUERS = ("accounts.google.com", "https://accounts.google.com")
# Expected `aud` claim, taken from Airflow's [api] google_oauth2_audience config.
AUDIENCE = conf.get("api", "google_oauth2_audience")
def create_client_session():
    """Build an authorized HTTP session backed by ID-token credentials."""
    service_account_path = conf.get("api", "google_key_path")
    # Prefer an explicitly configured service-account key; otherwise fall back to ADC.
    if not service_account_path:
        id_token_credentials = get_default_id_token_credentials(target_audience=AUDIENCE)
    else:
        id_token_credentials = service_account.IDTokenCredentials.from_service_account_file(
            service_account_path
        )
    return AuthorizedSession(credentials=id_token_credentials)
def init_app(_):
    """Initializes authentication.

    This backend requires no per-application setup, so this is intentionally a
    no-op; the Flask app argument is ignored.
    """
def _get_id_token_from_request(request) -> str | None:
    """Extract the bearer ID token from the request's Authorization header, if present."""
    authorization_header = request.headers.get("Authorization")
    if not authorization_header:
        return None

    parts = authorization_header.split(" ", 2)
    # Accept only a well-formed "Bearer <token>" header (scheme match is case-insensitive).
    if len(parts) == 2 and parts[0].lower() == "bearer":
        return parts[1]
    return None
def _verify_id_token(id_token: str) -> str | None:
    """Verify the Google-signed ID token; return the verified e-mail claim, or None if invalid."""
    try:
        request_adapter = google.auth.transport.requests.Request()
        id_info = google.oauth2.id_token.verify_token(id_token, request_adapter, AUDIENCE)
    except exceptions.GoogleAuthError:
        # Signature/audience/expiry verification failed.
        return None

    # This check is part of google-auth v1.19.0 (2020-07-09), In order not to create strong version
    # requirements to too new version, we check it in our code too.
    # One day, we may delete this code and set minimum version in requirements.
    if id_info.get("iss") not in _GOOGLE_ISSUERS:
        return None

    if not id_info.get("email_verified", False):
        return None

    return id_info.get("email")
def _lookup_user(user_email: str):
    """Return the active user registered under the given e-mail, or None."""
    security_manager = current_app.appbuilder.sm  # type: ignore[attr-defined]
    user = security_manager.find_user(email=user_email)
    # Unknown or deactivated accounts are treated the same: no user.
    if user and user.is_active:
        return user
    return None
def _set_current_user(user):
    """Attach the authenticated user to the current Flask request context."""
    current_app.appbuilder.sm.lm._update_request_context_with_user(user=user)  # type: ignore[attr-defined]
# Type variable preserving the decorated callable's signature.
T = TypeVar("T", bound=Callable)


def requires_authentication(function: T):
    """Decorator for functions that require authentication."""

    @wraps(function)
    def decorated(*args, **kwargs):
        # 1. Pull the bearer token off the incoming request.
        access_token = _get_id_token_from_request(flask_request)
        if not access_token:
            log.debug("Missing ID Token")
            return Response("Forbidden", 403)

        # 2. Verify the token and extract the e-mail claim.
        userid = _verify_id_token(access_token)
        if not userid:
            log.debug("Invalid ID Token")
            return Response("Forbidden", 403)

        # 3. Map the e-mail to an active Airflow user; unknown users are rejected.
        log.debug("Looking for user with e-mail: %s", userid)

        user = _lookup_user(userid)
        if not user:
            return Response("Forbidden", 403)

        log.debug("Found user: %s", user)

        _set_current_user(user)

        return function(*args, **kwargs)

    return cast(T, decorated)
| 4,517 | 31.271429 | 107 | py |
airflow | airflow-main/airflow/providers/google/common/auth_backend/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/leveldb/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/leveldb/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/leveldb/operators/leveldb.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from airflow.models import BaseOperator
from airflow.providers.google.leveldb.hooks.leveldb import LevelDBHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class LevelDBOperator(BaseOperator):
    """
    Execute command in LevelDB.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:LevelDBOperator`

    :param command: plyvel (python LevelDB wrapper) DB-object command to run, e.g.
        ``"put"``, ``"get"``, ``"delete"``, ``"write_batch"``.
    :param key: key the command (put, get, delete) operates on, e.g. ``b'key'``
    :param value: value for the ``put`` command, e.g. ``b'value'``
    :param keys: keys for the ``write_batch`` command, e.g. ``[b'key', b'another-key']``
    :param values: values for the ``write_batch`` command, e.g. ``[b'value', b'another-value']``
    :param leveldb_conn_id: connection to run the operator with
    :param create_if_missing: whether a new database should be created if needed
    :param create_db_extra_options: extra options used when opening the database. See more in the link below
        `Plyvel DB <https://plyvel.readthedocs.io/en/latest/api.html#DB>`__
    """

    def __init__(
        self,
        *,
        command: str,
        key: bytes,
        value: bytes | None = None,
        keys: list[bytes] | None = None,
        values: list[bytes] | None = None,
        leveldb_conn_id: str = "leveldb_default",
        name: str = "/tmp/testdb/",
        create_if_missing: bool = True,
        create_db_extra_options: dict[str, Any] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.command = command
        self.key = key
        self.value = value
        self.keys = keys
        self.values = values
        self.leveldb_conn_id = leveldb_conn_id
        self.name = name
        self.create_if_missing = create_if_missing
        self.create_db_extra_options = create_db_extra_options or {}

    def execute(self, context: Context) -> str | None:
        """
        Run the configured command against LevelDB.

        :returns: the result decoded to ``str`` (not ``bytes``, to prevent an error in
            json.dumps in serialize_value in xcom.py), or ``None``.
        """
        hook = LevelDBHook(leveldb_conn_id=self.leveldb_conn_id)
        hook.get_conn(
            name=self.name, create_if_missing=self.create_if_missing, **self.create_db_extra_options
        )
        raw_value = hook.run(
            command=self.command,
            key=self.key,
            value=self.value,
            keys=self.keys,
            values=self.values,
        )
        self.log.info("Done. Returned value was: %s", str(raw_value))
        hook.close_conn()
        # Decode for XCom serialization; preserve None for commands without a result.
        return None if raw_value is None else raw_value.decode()
| 3,793 | 38.936842 | 110 | py |
airflow | airflow-main/airflow/providers/google/leveldb/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/leveldb/hooks/leveldb.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Level DB."""
from __future__ import annotations
from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException
from airflow.hooks.base import BaseHook
try:
import plyvel
from plyvel import DB
except ImportError as e:
raise AirflowOptionalProviderFeatureException(e)
DB_NOT_INITIALIZED_BEFORE = "The `get_conn` method should be called before!"
class LevelDBHookException(AirflowException):
    """Exception raised by :class:`LevelDBHook`, e.g. for an unrecognized command."""
class LevelDBHook(BaseHook):
    """
    Plyvel Wrapper to Interact With LevelDB Database.

    `LevelDB Connection Documentation <https://plyvel.readthedocs.io/en/latest/>`__
    """

    conn_name_attr = "leveldb_conn_id"
    default_conn_name = "leveldb_default"
    conn_type = "leveldb"
    hook_name = "LevelDB"

    def __init__(self, leveldb_conn_id: str = default_conn_name):
        super().__init__()
        self.leveldb_conn_id = leveldb_conn_id
        self.connection = self.get_connection(leveldb_conn_id)
        # Lazily opened in get_conn(); None until then and after close_conn().
        self.db: plyvel.DB | None = None

    def get_conn(self, name: str = "/tmp/testdb/", create_if_missing: bool = False, **kwargs) -> DB:
        """
        Creates `Plyvel DB <https://plyvel.readthedocs.io/en/latest/api.html#DB>`__.

        :param name: path to create database e.g. `/tmp/testdb/`)
        :param create_if_missing: whether a new database should be created if needed
        :param kwargs: other options of creation plyvel.DB. See more in the link above.
        :returns: DB
        """
        # Reuse the already-open handle, if any, so repeated calls are idempotent.
        if self.db is not None:
            return self.db
        self.db = plyvel.DB(name=name, create_if_missing=create_if_missing, **kwargs)
        return self.db

    def close_conn(self) -> None:
        """Closes connection (no-op if the database was never opened)."""
        db = self.db
        if db is not None:
            db.close()
            self.db = None

    def run(
        self,
        command: str,
        key: bytes,
        value: bytes | None = None,
        keys: list[bytes] | None = None,
        values: list[bytes] | None = None,
    ) -> bytes | None:
        """
        Execute operation with leveldb.

        :param command: command of plyvel(python wrap for leveldb) for DB object e.g.
            ``"put"``, ``"get"``, ``"delete"``, ``"write_batch"``.
        :param key: key for command(put,get,delete) execution(, e.g. ``b'key'``, ``b'another-key'``)
        :param value: value for command(put) execution(bytes, e.g. ``b'value'``, ``b'another-value'``)
        :param keys: keys for command(write_batch) execution(list[bytes], e.g. ``[b'key', b'another-key'])``
        :param values: values for command(write_batch) execution e.g. ``[b'value'``, ``b'another-value']``
        :returns: value from get or None
        """
        if command == "put":
            # Check identity with None, not truthiness: b"" is a perfectly valid
            # LevelDB value and must not be rejected.
            if value is None:
                raise Exception("Please provide `value`!")
            return self.put(key, value)
        elif command == "get":
            return self.get(key)
        elif command == "delete":
            return self.delete(key)
        elif command == "write_batch":
            # Same reasoning: only a missing argument is an error; an empty
            # list of keys is simply a no-op batch.
            if keys is None:
                raise Exception("Please provide `keys`!")
            if values is None:
                raise Exception("Please provide `values`!")
            return self.write_batch(keys, values)
        else:
            raise LevelDBHookException("Unknown command for LevelDB hook")

    def put(self, key: bytes, value: bytes):
        """
        Put a single value into a leveldb db by key.

        :param key: key for put execution, e.g. ``b'key'``, ``b'another-key'``
        :param value: value for put execution e.g. ``b'value'``, ``b'another-value'``
        """
        if not self.db:
            raise Exception(DB_NOT_INITIALIZED_BEFORE)
        self.db.put(key, value)

    def get(self, key: bytes) -> bytes:
        """
        Get a single value into a leveldb db by key.

        :param key: key for get execution, e.g. ``b'key'``, ``b'another-key'``
        :returns: value of key from db.get
        """
        if not self.db:
            raise Exception(DB_NOT_INITIALIZED_BEFORE)
        return self.db.get(key)

    def delete(self, key: bytes):
        """
        Delete a single value in a leveldb db by key.

        :param key: key for delete execution, e.g. ``b'key'``, ``b'another-key'``
        """
        if not self.db:
            raise Exception(DB_NOT_INITIALIZED_BEFORE)
        self.db.delete(key)

    def write_batch(self, keys: list[bytes], values: list[bytes]):
        """
        Write batch of values in a leveldb db by keys.

        :param keys: keys for write_batch execution e.g. ``[b'key', b'another-key']``
        :param values: values for write_batch execution e.g. ``[b'value', b'another-value']``
        """
        if not self.db:
            raise Exception(DB_NOT_INITIALIZED_BEFORE)
        # write_batch() is atomic: all puts are applied on context-manager exit.
        with self.db.write_batch() as batch:
            for i, key in enumerate(keys):
                batch.put(key, values[i])
| 5,762 | 36.180645 | 108 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/operators/analytics.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Analytics 360 operators."""
from __future__ import annotations
import csv
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Any, Sequence
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.marketing_platform.hooks.analytics import GoogleAnalyticsHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleAnalyticsListAccountsOperator(BaseOperator):
    """
    Lists all accounts to which the user has access.

    .. seealso::
        Check official API docs:
        https://developers.google.com/analytics/devguides/config/mgmt/v3/mgmtReference/management/accounts/list
        and for python client
        http://googleapis.github.io/google-api-python-client/docs/dyn/analytics_v3.management.accounts.html#list

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleAnalyticsListAccountsOperator`

    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("api_version", "gcp_conn_id", "impersonation_chain")

    def __init__(
        self,
        *,
        api_version: str = "v3",
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.impersonation_chain = impersonation_chain
        self.gcp_conn_id = gcp_conn_id
        self.api_version = api_version

    def execute(self, context: Context) -> list[dict[str, Any]]:
        """Return the list of Analytics accounts visible to the configured identity."""
        hook = GoogleAnalyticsHook(
            api_version=self.api_version,
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        return hook.list_accounts()
class GoogleAnalyticsGetAdsLinkOperator(BaseOperator):
    """
    Returns a web property-Google Ads link to which the user has access.

    .. seealso::
        Check official API docs:
        https://developers.google.com/analytics/devguides/config/mgmt/v3/mgmtReference/management/webPropertyAdWordsLinks/get

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleAnalyticsGetAdsLinkOperator`

    :param account_id: ID of the account which the given web property belongs to.
    :param web_property_ad_words_link_id: Web property-Google Ads link ID.
    :param web_property_id: Web property ID to retrieve the Google Ads link for.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "api_version",
        "gcp_conn_id",
        "account_id",
        "web_property_ad_words_link_id",
        "web_property_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        account_id: str,
        web_property_ad_words_link_id: str,
        web_property_id: str,
        api_version: str = "v3",
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        # Connection settings.
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        # Link lookup coordinates.
        self.account_id = account_id
        self.web_property_id = web_property_id
        self.web_property_ad_words_link_id = web_property_ad_words_link_id

    def execute(self, context: Context) -> dict[str, Any]:
        """Fetch a single web-property / Google Ads link from the management API."""
        hook = GoogleAnalyticsHook(
            api_version=self.api_version,
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        return hook.get_ad_words_link(
            account_id=self.account_id,
            web_property_id=self.web_property_id,
            web_property_ad_words_link_id=self.web_property_ad_words_link_id,
        )
class GoogleAnalyticsRetrieveAdsLinksListOperator(BaseOperator):
    """
    Lists webProperty-Google Ads links for a given web property.

    .. seealso::
        Check official API docs:
        https://developers.google.com/analytics/devguides/config/mgmt/v3/mgmtReference/management/webPropertyAdWordsLinks/list#http-request

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleAnalyticsRetrieveAdsLinksListOperator`

    :param account_id: ID of the account which the given web property belongs to.
    :param web_property_id: Web property UA-string to retrieve the Google Ads links for.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "api_version",
        "gcp_conn_id",
        "account_id",
        "web_property_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        account_id: str,
        web_property_id: str,
        api_version: str = "v3",
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.impersonation_chain = impersonation_chain
        self.gcp_conn_id = gcp_conn_id
        self.api_version = api_version
        self.web_property_id = web_property_id
        self.account_id = account_id

    def execute(self, context: Context) -> list[dict[str, Any]]:
        """Return all Google Ads links attached to the configured web property."""
        hook = GoogleAnalyticsHook(
            api_version=self.api_version,
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        return hook.list_ad_words_links(
            account_id=self.account_id,
            web_property_id=self.web_property_id,
        )
class GoogleAnalyticsDataImportUploadOperator(BaseOperator):
    """
    Take a file from Cloud Storage and uploads it to GA via data import API.

    :param storage_bucket: The Google cloud storage bucket where the file is stored.
    :param storage_name_object: The name of the object in the desired Google cloud
        storage bucket. (templated) If the destination points to an existing
        folder, the file will be taken from the specified folder.
    :param account_id: The GA account Id (long) to which the data upload belongs.
    :param web_property_id: The web property UA-string associated with the upload.
    :param custom_data_source_id: The id to which the data import belongs
    :param resumable_upload: flag to upload the file in a resumable fashion, using a
        series of at least two requests.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "storage_bucket",
        "storage_name_object",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        storage_bucket: str,
        storage_name_object: str,
        account_id: str,
        web_property_id: str,
        custom_data_source_id: str,
        resumable_upload: bool = False,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        api_version: str = "v3",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Source object in GCS.
        self.storage_bucket = storage_bucket
        self.storage_name_object = storage_name_object
        # Destination in Google Analytics.
        self.account_id = account_id
        self.web_property_id = web_property_id
        self.custom_data_source_id = custom_data_source_id
        self.resumable_upload = resumable_upload
        # Credentials / API settings.
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.api_version = api_version
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Download the object from GCS into a temp file and upload it to GA data import."""
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        ga_hook = GoogleAnalyticsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        with NamedTemporaryFile("w+") as temp_file:
            self.log.info(
                "Downloading file from GCS: %s/%s ",
                self.storage_bucket,
                self.storage_name_object,
            )
            gcs_hook.download(
                bucket_name=self.storage_bucket,
                object_name=self.storage_name_object,
                filename=temp_file.name,
            )
            # Upload while the temp file still exists (it is removed on context exit).
            ga_hook.upload_data(
                temp_file.name,
                self.account_id,
                self.web_property_id,
                self.custom_data_source_id,
                self.resumable_upload,
            )
class GoogleAnalyticsDeletePreviousDataUploadsOperator(BaseOperator):
    """
    Deletes previous GA uploads to leave the latest file to control the size of the Data Set Quota.

    :param account_id: The GA account Id (long) to which the data upload belongs.
    :param web_property_id: The web property UA-string associated with the upload.
    :param custom_data_source_id: The id to which the data import belongs.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("impersonation_chain",)

    def __init__(
        self,
        account_id: str,
        web_property_id: str,
        custom_data_source_id: str,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        api_version: str = "v3",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.impersonation_chain = impersonation_chain
        self.api_version = api_version
        self.delegate_to = delegate_to
        self.gcp_conn_id = gcp_conn_id
        self.custom_data_source_id = custom_data_source_id
        self.web_property_id = web_property_id
        self.account_id = account_id

    def execute(self, context: Context) -> None:
        """List every existing upload for the data source and delete them all."""
        ga_hook = GoogleAnalyticsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        uploads = ga_hook.list_uploads(
            account_id=self.account_id,
            web_property_id=self.web_property_id,
            custom_data_source_id=self.custom_data_source_id,
        )
        upload_ids = [upload["id"] for upload in uploads]
        # A single bulk-delete request covering every previous upload.
        ga_hook.delete_upload_data(
            self.account_id,
            self.web_property_id,
            self.custom_data_source_id,
            {"customDataImportUids": upload_ids},
        )
class GoogleAnalyticsModifyFileHeadersDataImportOperator(BaseOperator):
    """
    GA has a very particular naming convention for Data Import.
    Ability to prefix "ga:" to all column headers and also a dict to rename columns to
    match the custom dimension ID in GA i.e clientId : dimensionX.
    :param storage_bucket: The Google cloud storage bucket where the file is stored.
    :param storage_name_object: The name of the object in the desired Google cloud
        storage bucket. (templated) If the destination points to an existing
        folder, the file will be taken from the specified folder.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param custom_dimension_header_mapping: Dictionary to handle when uploading
        custom dimensions which have generic IDs ie. 'dimensionX' which are
        set by GA. Dictionary maps the current CSV header to GA ID which will
        be the new header for the CSV to upload to GA eg clientId : dimension1.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    template_fields: Sequence[str] = (
        "storage_bucket",
        "storage_name_object",
        "impersonation_chain",
    )
    def __init__(
        self,
        storage_bucket: str,
        storage_name_object: str,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        custom_dimension_header_mapping: dict[str, str] | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.storage_bucket = storage_bucket
        self.storage_name_object = storage_name_object
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        # Empty mapping => headers are only prefixed with "ga:", never renamed.
        self.custom_dimension_header_mapping = custom_dimension_header_mapping or {}
        self.impersonation_chain = impersonation_chain
    def _modify_column_headers(
        self, tmp_file_location: str, custom_dimension_header_mapping: dict[str, str]
    ) -> None:
        """Rewrite the CSV at ``tmp_file_location`` in place so its header row is GA-compatible.

        Each header is optionally renamed via ``custom_dimension_header_mapping`` and then
        prefixed with ``ga:``. Raises ``NameError`` when no header row is detected.
        """
        # Check headers
        self.log.info("Checking if file contains headers")
        with open(tmp_file_location) as check_header_file:
            # Sniff only the first 1 KiB — enough for header detection.
            has_header = csv.Sniffer().has_header(check_header_file.read(1024))
            if not has_header:
                raise NameError(
                    "CSV does not contain headers, please add them "
                    "to use the modify column headers functionality"
                )
        # Transform
        self.log.info("Modifying column headers to be compatible for data upload")
        with open(tmp_file_location) as read_file:
            reader = csv.reader(read_file)
            headers = next(reader)
            new_headers = []
            for header in headers:
                if header in custom_dimension_header_mapping:
                    header = custom_dimension_header_mapping.get(header)  # type: ignore
                new_header = f"ga:{header}"
                new_headers.append(new_header)
            # The data rows after the consumed header line are kept verbatim.
            all_data = read_file.readlines()
            final_headers = ",".join(new_headers) + "\n"
            all_data.insert(0, final_headers)
        # Save result
        self.log.info("Saving transformed file")
        with open(tmp_file_location, "w") as write_file:
            write_file.writelines(all_data)
    def execute(self, context: Context) -> None:
        """Download the object from GCS, rewrite its headers, and upload it back in place."""
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        with NamedTemporaryFile("w+") as tmp_file:
            # Download file from GCS
            self.log.info(
                "Downloading file from GCS: %s/%s ",
                self.storage_bucket,
                self.storage_name_object,
            )
            gcs_hook.download(
                bucket_name=self.storage_bucket,
                object_name=self.storage_name_object,
                filename=tmp_file.name,
            )
            # Modify file
            self.log.info("Modifying temporary file %s", tmp_file.name)
            self._modify_column_headers(
                tmp_file_location=tmp_file.name,
                custom_dimension_header_mapping=self.custom_dimension_header_mapping,
            )
            # Upload newly formatted file to cloud storage
            self.log.info(
                "Uploading file to GCS: %s/%s ",
                self.storage_bucket,
                self.storage_name_object,
            )
            gcs_hook.upload(
                bucket_name=self.storage_bucket,
                object_name=self.storage_name_object,
                filename=tmp_file.name,
            )
| 21,221 | 40.611765 | 139 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/operators/display_video.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google DisplayVideo operators."""
from __future__ import annotations
import csv
import json
import shutil
import tempfile
import urllib.request
from typing import TYPE_CHECKING, Any, Sequence
from urllib.parse import urlsplit
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleDisplayVideo360CreateQueryOperator(BaseOperator):
    """
    Creates a query.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        ``GoogleDisplayVideo360CreateQueryOperator``

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/bid-manager/v2/queries/create`

    :param body: Report object passed to the request's body as described here:
        https://developers.google.com/bid-manager/v2/queries#Query
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "body",
        "impersonation_chain",
    )
    template_ext: Sequence[str] = (".json",)

    def __init__(
        self,
        *,
        body: dict[str, Any],
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.body = body
        self.impersonation_chain = impersonation_chain
        self.delegate_to = delegate_to
        self.gcp_conn_id = gcp_conn_id
        self.api_version = api_version

    def prepare_template(self) -> None:
        """Load the request body from disk when a ``.json`` template path was supplied."""
        if isinstance(self.body, str) and self.body.endswith(".json"):
            with open(self.body) as fp:
                self.body = json.load(fp)

    def execute(self, context: Context) -> dict:
        """Create the query and push its id to XCom under the ``query_id`` key."""
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Creating Display & Video 360 query.")
        report = hook.create_query(query=self.body)
        report_id = report["queryId"]
        self.xcom_push(context, key="query_id", value=report_id)
        self.log.info("Created query with ID: %s", report_id)
        return report
class GoogleDisplayVideo360DeleteReportOperator(BaseOperator):
    """
    Deletes a stored query as well as the associated stored reports.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360DeleteReportOperator`

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/bid-manager/v2/queries/delete`

    :param report_id: Report ID to delete.
    :param report_name: Name of the report to delete.
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "report_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        report_id: str | None = None,
        report_name: str | None = None,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.report_id = report_id
        self.report_name = report_name
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Exactly one of report_id / report_name must be given.
        if report_name and report_id:
            raise AirflowException("Use only one value - `report_name` or `report_id`.")
        if not report_name and not report_id:
            raise AirflowException("Provide one of the values: `report_name` or `report_id`.")

    def execute(self, context: Context) -> None:
        """Delete the query identified by id, or every query whose title matches the name."""
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        if self.report_id:
            ids_to_delete = [self.report_id]
        else:
            # Resolve the name to every matching query id.
            ids_to_delete = [
                query["queryId"]
                for query in hook.list_queries()
                if query["metadata"]["title"] == self.report_name
            ]
        for query_id in ids_to_delete:
            self.log.info("Deleting report with id: %s", query_id)
            hook.delete_query(query_id=query_id)
            self.log.info("Report deleted.")
class GoogleDisplayVideo360DownloadReportV2Operator(BaseOperator):
    """
    Retrieves a stored query.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360DownloadReportV2Operator`

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/bid-manager/v2/queries/get`

    :param query_id: ID of the stored query the report belongs to.
    :param report_id: Report ID to retrieve.
    :param bucket_name: The bucket to upload to.
    :param report_name: The report name to set when uploading the local file.
    :param chunk_size: File will be downloaded in chunks of this many bytes.
    :param gzip: Option to compress local file or file data for upload
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "query_id",
        "report_id",
        "bucket_name",
        "report_name",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        query_id: str,
        report_id: str,
        bucket_name: str,
        report_name: str | None = None,
        gzip: bool = True,
        chunk_size: int = 10 * 1024 * 1024,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.query_id = query_id
        self.report_id = report_id
        self.chunk_size = chunk_size
        self.gzip = gzip
        self.bucket_name = bucket_name
        self.report_name = report_name
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def _resolve_file_name(self, name: str) -> str:
        """Return *name* with a ``.csv`` suffix (plus ``.gz`` when gzip upload is enabled)."""
        new_name = name if name.endswith(".csv") else f"{name}.csv"
        new_name = f"{new_name}.gz" if self.gzip else new_name
        return new_name

    @staticmethod
    def _set_bucket_name(name: str) -> str:
        """Strip an optional ``gs://`` scheme and surrounding slashes from the bucket name."""
        bucket = name if not name.startswith("gs://") else name[5:]
        return bucket.strip("/")

    def execute(self, context: Context):
        """Fetch the report metadata, download the report file and upload it to GCS."""
        import os  # local import: only needed for temp-file cleanup below

        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        resource = hook.get_report(query_id=self.query_id, report_id=self.report_id)
        status = resource.get("metadata", {}).get("status", {}).get("state")
        # NOTE(review): a FAILED report passes this check and is downloaded anyway —
        # confirm that is intended.
        if resource and status not in ["DONE", "FAILED"]:
            raise AirflowException(f"Report {self.report_id} for query {self.query_id} is still running")
        # If no custom report_name provided, use DV360 name
        file_url = resource["metadata"]["googleCloudStoragePath"]
        report_name = self.report_name or urlsplit(file_url).path.split("/")[-1]
        report_name = self._resolve_file_name(report_name)
        # Download the report
        self.log.info("Starting downloading report %s", self.report_id)
        # delete=False lets GCSHook re-open the file by name (required on platforms
        # such as Windows where an open NamedTemporaryFile cannot be opened twice),
        # so the file must be removed explicitly once the upload is done.
        temp_file = tempfile.NamedTemporaryFile(delete=False)
        try:
            with temp_file:
                with urllib.request.urlopen(file_url) as response:
                    shutil.copyfileobj(response, temp_file, length=self.chunk_size)
                temp_file.flush()
                # Upload the local file to bucket
                bucket_name = self._set_bucket_name(self.bucket_name)
                gcs_hook.upload(
                    bucket_name=bucket_name,
                    object_name=report_name,
                    gzip=self.gzip,
                    filename=temp_file.name,
                    mime_type="text/csv",
                )
        finally:
            # Bug fix: previously the delete=False temporary file was never removed,
            # leaking one file per task run on the worker.
            os.unlink(temp_file.name)
        self.log.info(
            "Report %s was saved in bucket %s as %s.",
            self.report_id,
            self.bucket_name,
            report_name,
        )
        self.xcom_push(context, key="report_name", value=report_name)
class GoogleDisplayVideo360RunQueryOperator(BaseOperator):
    """
    Runs a stored query to generate a report.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360RunQueryOperator`

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/bid-manager/v2/queries/run`

    :param query_id: ID of the stored query to run.
    :param parameters: Parameters for running a report as described here:
        https://developers.google.com/bid-manager/v2/queries/run
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "query_id",
        "parameters",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        query_id: str,
        parameters: dict[str, Any] | None = None,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.query_id = query_id
        self.parameters = parameters
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> dict:
        """Trigger the stored query and push the resulting query/report ids to XCom."""
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info(
            "Running query %s with the following parameters:\n %s",
            self.query_id,
            self.parameters,
        )
        response = hook.run_query(query_id=self.query_id, params=self.parameters)
        run_key = response["key"]
        self.xcom_push(context, key="query_id", value=run_key["queryId"])
        self.xcom_push(context, key="report_id", value=run_key["reportId"])
        return response
class GoogleDisplayVideo360DownloadLineItemsOperator(BaseOperator):
    """
    Retrieves line items in CSV format.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360DownloadLineItemsOperator`

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/bid-manager/v1.1/lineitems/downloadlineitems`

    :param request_body: dictionary with parameters that should be passed into.
        More information about it can be found here:
        https://developers.google.com/bid-manager/v1.1/lineitems/downloadlineitems
    :param bucket_name: The GCS bucket the resulting CSV is uploaded to.
    :param object_name: The GCS object name the CSV is stored under.
    :param gzip: Option to compress the uploaded file.
    :param api_version: The version of the api that will be requested for example 'v1.1'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    """

    template_fields: Sequence[str] = (
        "request_body",
        "bucket_name",
        "object_name",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        request_body: dict[str, Any],
        bucket_name: str,
        object_name: str,
        gzip: bool = False,
        api_version: str = "v1.1",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.request_body = request_body
        self.bucket_name = bucket_name
        self.object_name = object_name
        self.gzip = gzip
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        """Download line items, write them to a temporary CSV and upload it to GCS."""
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Retrieving report...")
        line_items: list[str] = hook.download_line_items(request_body=self.request_body)
        # Stage the rows in a temporary CSV file, then hand it to the GCS hook.
        with tempfile.NamedTemporaryFile("w+") as csv_file:
            csv.writer(csv_file).writerows(line_items)
            csv_file.flush()
            gcs_hook.upload(
                bucket_name=self.bucket_name,
                object_name=self.object_name,
                filename=csv_file.name,
                mime_type="text/csv",
                gzip=self.gzip,
            )
        return f"{self.bucket_name}/{self.object_name}"
class GoogleDisplayVideo360UploadLineItemsOperator(BaseOperator):
    """
    Uploads line items in CSV format.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360UploadLineItemsOperator`

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/bid-manager/v1.1/lineitems/uploadlineitems`

    :param bucket_name: The GCS bucket the line-items CSV is downloaded from.
    :param object_name: The GCS object (CSV file) to fetch and upload as line items.
    :param api_version: The version of the api that will be requested for example 'v1.1'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "bucket_name",
        "object_name",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        bucket_name: str,
        object_name: str,
        api_version: str = "v1.1",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.bucket_name = bucket_name
        self.object_name = object_name
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Fetch the CSV from GCS and pass it to the DV360 ``uploadlineitems`` call."""
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        # Bug fix: the "%s" placeholder previously had no argument, so the log record
        # failed to format and the object name was never shown.
        self.log.info("Uploading file %s...", self.object_name)
        # Saving file in the temporary directory,
        # downloaded file from the GCS could be a 1GB size or even more
        with tempfile.NamedTemporaryFile("w+") as f:
            line_items = gcs_hook.download(
                bucket_name=self.bucket_name,
                object_name=self.object_name,
                filename=f.name,
            )
            f.flush()
            hook.upload_line_items(line_items=line_items)
class GoogleDisplayVideo360CreateSDFDownloadTaskOperator(BaseOperator):
    """
    Creates SDF operation task.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360CreateSDFDownloadTaskOperator`

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/display-video/api/reference/rest`

    :param body_request: Request body passed verbatim to the SDF download task
        ``create`` call; see the API docs above for the accepted fields (SDF version,
        partner/advertiser selection and entity filters).
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "body_request",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        body_request: dict[str, Any],
        api_version: str = "v1",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.body_request = body_request
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> dict[str, Any]:
        """Create the SDF download operation and push its name to XCom."""
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Creating operation for SDF download task...")
        operation = hook.create_sdf_download_operation(body_request=self.body_request)
        # The operation name is pushed to XCom so a downstream sensor/operator
        # (e.g. GoogleDisplayVideo360SDFtoGCSOperator) can poll and fetch the result.
        name = operation["name"]
        self.xcom_push(context, key="name", value=name)
        self.log.info("Created SDF operation with name: %s", name)
        return operation
class GoogleDisplayVideo360SDFtoGCSOperator(BaseOperator):
    """
    Download SDF media and save it in the Google Cloud Storage.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360SDFtoGCSOperator`

    .. seealso::
        Check also the official API docs:
        `https://developers.google.com/display-video/api/reference/rest`

    :param operation_name: The name of the SDF download task operation (as returned
        by ``GoogleDisplayVideo360CreateSDFDownloadTaskOperator``).
    :param bucket_name: The GCS bucket to upload the SDF media to.
    :param object_name: The GCS object name to store the SDF media under.
    :param gzip: Option to compress the uploaded file.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "operation_name",
        "bucket_name",
        "object_name",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        operation_name: str,
        bucket_name: str,
        object_name: str,
        gzip: bool = False,
        api_version: str = "v1",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.operation_name = operation_name
        self.bucket_name = bucket_name
        self.object_name = object_name
        self.gzip = gzip
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> str:
        """Fetch the finished SDF operation's media and upload it to GCS; return ``bucket/object``."""
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Retrieving operation...")
        # NOTE(review): assumes the operation has already completed — a missing
        # "response" key below would raise KeyError; confirm upstream waits for it.
        operation_state = hook.get_sdf_download_operation(operation_name=self.operation_name)
        self.log.info("Creating file for upload...")
        media = hook.download_media(resource_name=operation_state["response"]["resourceName"])
        self.log.info("Sending file to the Google Cloud Storage...")
        # Stream the media into a temp file in 1 MiB chunks, then upload by filename.
        with tempfile.NamedTemporaryFile() as temp_file:
            hook.download_content_from_request(temp_file, media, chunk_size=1024 * 1024)
            temp_file.flush()
            gcs_hook.upload(
                bucket_name=self.bucket_name,
                object_name=self.object_name,
                filename=temp_file.name,
                gzip=self.gzip,
            )
        return f"{self.bucket_name}/{self.object_name}"
| 28,017 | 38.968616 | 106 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/operators/search_ads.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Search Ads operators."""
from __future__ import annotations
import json
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Any, Sequence
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.marketing_platform.hooks.search_ads import GoogleSearchAdsHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleSearchAdsInsertReportOperator(BaseOperator):
    """
    Inserts a report request into the reporting system.

    .. seealso:
        For API documentation check:
        https://developers.google.com/search-ads/v2/reference/reports/request

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleSearchAdsInsertReportOperator`

    :param report: Report to be generated
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "report",
        "impersonation_chain",
    )
    template_ext: Sequence[str] = (".json",)

    def __init__(
        self,
        *,
        report: dict[str, Any],
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.report = report
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def prepare_template(self) -> None:
        """Deserialize ``report`` when a path to a templated ``.json`` file was provided."""
        report_is_json_path = isinstance(self.report, str) and self.report.endswith(".json")
        if report_is_json_path:
            with open(self.report) as report_file:
                self.report = json.load(report_file)

    def execute(self, context: Context):
        """Submit the report request and push the resulting report id to XCom."""
        hook = GoogleSearchAdsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Generating Search Ads report")
        response = hook.insert_report(report=self.report)
        report_id = response.get("id")
        self.xcom_push(context, key="report_id", value=report_id)
        self.log.info("Report generated, id: %s", report_id)
        return response
class GoogleSearchAdsDownloadReportOperator(BaseOperator):
    """
    Downloads a report to GCS bucket.

    .. seealso:
        For API documentation check:
        https://developers.google.com/search-ads/v2/reference/reports/getFile

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleSearchAdsGetfileReportOperator`

    :param report_id: ID of the report.
    :param bucket_name: The bucket to upload to.
    :param report_name: The report name to set when uploading the local file. If not provided then
        report_id is used.
    :param gzip: Option to compress local file or file data for upload
    :param chunk_size: File will be downloaded in chunks of this many bytes.
    :param api_version: The version of the api that will be requested for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "report_name",
        "report_id",
        "bucket_name",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        report_id: str,
        bucket_name: str,
        report_name: str | None = None,
        gzip: bool = True,
        chunk_size: int = 10 * 1024 * 1024,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Fix: `self.report_id` was previously assigned twice; assign once.
        self.report_id = report_id
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.chunk_size = chunk_size
        self.gzip = gzip
        self.bucket_name = bucket_name
        self.report_name = report_name
        self.impersonation_chain = impersonation_chain

    def _resolve_file_name(self, name: str) -> str:
        """Append ``.csv`` (and ``.gz`` when compressing) to the report file name."""
        # Renamed the local suffix variables: the originals shadowed the csv/gzip modules.
        if not name.endswith(".csv"):
            name += ".csv"
        if self.gzip:
            name += ".gz"
        return name

    @staticmethod
    def _set_bucket_name(name: str) -> str:
        """Strip an optional ``gs://`` scheme and surrounding slashes from the bucket name."""
        bucket = name if not name.startswith("gs://") else name[5:]
        return bucket.strip("/")

    @staticmethod
    def _handle_report_fragment(fragment: bytes) -> bytes:
        """Drop the first line (CSV header) of a non-first report fragment."""
        fragment_records = fragment.split(b"\n", 1)
        if len(fragment_records) > 1:
            return fragment_records[1]
        return b""

    def execute(self, context: Context):
        """Concatenate all report fragments into one file and upload it to GCS."""
        hook = GoogleSearchAdsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        # Resolve file name of the report
        report_name = self.report_name or self.report_id
        report_name = self._resolve_file_name(report_name)
        response = hook.get(report_id=self.report_id)
        if not response["isReportReady"]:
            raise AirflowException(f"Report {self.report_id} is not ready yet")
        # Resolve report fragments
        fragments_count = len(response["files"])
        # Download chunks of report's data
        self.log.info("Downloading Search Ads report %s", self.report_id)
        with NamedTemporaryFile() as temp_file:
            for i in range(fragments_count):
                byte_content = hook.get_file(report_fragment=i, report_id=self.report_id)
                # Only the first fragment keeps its CSV header row.
                fragment = byte_content if i == 0 else self._handle_report_fragment(byte_content)
                temp_file.write(fragment)
            temp_file.flush()
            bucket_name = self._set_bucket_name(self.bucket_name)
            gcs_hook.upload(
                bucket_name=bucket_name,
                object_name=report_name,
                gzip=self.gzip,
                filename=temp_file.name,
            )
        self.xcom_push(context, key="file_name", value=report_name)
| 9,293 | 38.548936 | 98 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/operators/campaign_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google CampaignManager operators."""
from __future__ import annotations
import json
import tempfile
import uuid
from typing import TYPE_CHECKING, Any, Sequence
from googleapiclient import http
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.marketing_platform.hooks.campaign_manager import GoogleCampaignManagerHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleCampaignManagerDeleteReportOperator(BaseOperator):
    """
    Deletes a report by its ID.

    .. seealso::
        Check official API docs:
        `https://developers.google.com/doubleclick-advertisers/rest/v4/reports/delete`

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleCampaignManagerDeleteReportOperator`

    :param profile_id: The DFA user profile ID.
    :param report_name: The name of the report to delete.
    :param report_id: The ID of the report.
    :param api_version: The version of the api that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "profile_id",
        "report_id",
        "report_name",
        "api_version",
        "gcp_conn_id",
        "delegate_to",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        profile_id: str,
        report_name: str | None = None,
        report_id: str | None = None,
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Exactly one of `report_name` / `report_id` must be given.
        if not (report_name or report_id):
            raise AirflowException("Please provide `report_name` or `report_id`.")
        if report_name and report_id:
            raise AirflowException("Please provide only one parameter `report_name` or `report_id`.")
        self.profile_id = profile_id
        self.report_name = report_name
        self.report_id = report_id
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Delete the report selected by name or by id via the Campaign Manager API."""
        hook = GoogleCampaignManagerHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        if self.report_name:
            # A name may match several reports; delete every one of them.
            all_reports = hook.list_reports(profile_id=self.profile_id)
            matching_ids = (r["id"] for r in all_reports if r["name"] == self.report_name)
            for report_id in matching_ids:
                self.log.info("Deleting Campaign Manager report: %s", report_id)
                hook.delete_report(profile_id=self.profile_id, report_id=report_id)
                self.log.info("Report deleted.")
        elif self.report_id:
            self.log.info("Deleting Campaign Manager report: %s", self.report_id)
            hook.delete_report(profile_id=self.profile_id, report_id=self.report_id)
            self.log.info("Report deleted.")
class GoogleCampaignManagerDownloadReportOperator(BaseOperator):
    """
    Download a Campaign Manager report file and store it in a GCS bucket.

    .. seealso::
        Check official API docs:
        `https://developers.google.com/doubleclick-advertisers/rest/v4/reports/get`

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleCampaignManagerDownloadReportOperator`

    :param profile_id: The DFA user profile ID.
    :param report_id: The ID of the report.
    :param file_id: The ID of the report file.
    :param bucket_name: The bucket to upload to.
    :param report_name: The report name to set when uploading the local file.
    :param gzip: Option to compress local file or file data for upload.
    :param chunk_size: File will be downloaded in chunks of this many bytes.
    :param api_version: The version of the api that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "profile_id",
        "report_id",
        "file_id",
        "bucket_name",
        "report_name",
        "chunk_size",
        "api_version",
        "gcp_conn_id",
        "delegate_to",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        profile_id: str,
        report_id: str,
        file_id: str,
        bucket_name: str,
        report_name: str | None = None,
        gzip: bool = True,
        chunk_size: int = 10 * 1024 * 1024,
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.profile_id = profile_id
        self.report_id = report_id
        self.file_id = file_id
        self.api_version = api_version
        self.chunk_size = chunk_size
        self.gzip = gzip
        self.bucket_name = bucket_name
        self.report_name = report_name
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def _resolve_file_name(self, name: str) -> str:
        # Guarantee a ``.csv`` suffix, and add ``.gz`` on top when the upload is compressed.
        if not name.endswith(".csv"):
            name = f"{name}.csv"
        return f"{name}.gz" if self.gzip else name

    @staticmethod
    def _set_bucket_name(name: str) -> str:
        # Accept both plain bucket names and ``gs://bucket/`` style URIs.
        bucket = name[5:] if name.startswith("gs://") else name
        return bucket.strip("/")

    def execute(self, context: Context) -> None:
        cm_hook = GoogleCampaignManagerHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        # Work out the GCS object name: explicit name, the API's file name, or a random UUID.
        report = cm_hook.get_report(file_id=self.file_id, profile_id=self.profile_id, report_id=self.report_id)
        object_name = self._resolve_file_name(self.report_name or report.get("fileName", str(uuid.uuid4())))
        # Stream the report into a temporary file chunk by chunk, then hand it to GCS.
        self.log.info("Starting downloading report %s", self.report_id)
        request = cm_hook.get_report_file(
            profile_id=self.profile_id, report_id=self.report_id, file_id=self.file_id
        )
        with tempfile.NamedTemporaryFile() as temp_file:
            downloader = http.MediaIoBaseDownload(fd=temp_file, request=request, chunksize=self.chunk_size)
            while True:
                _, finished = downloader.next_chunk()
                if finished:
                    break
            temp_file.flush()
            gcs_hook.upload(
                bucket_name=self._set_bucket_name(self.bucket_name),
                object_name=object_name,
                gzip=self.gzip,
                filename=temp_file.name,
                mime_type="text/csv",
            )
        self.xcom_push(context, key="report_name", value=object_name)
class GoogleCampaignManagerInsertReportOperator(BaseOperator):
    """
    Creates a report.

    .. seealso::
        Check official API docs:
        `https://developers.google.com/doubleclick-advertisers/rest/v4/reports/insert`

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleCampaignManagerInsertReportOperator`

    :param profile_id: The DFA user profile ID.
    :param report: Report to be created, either as a dict or as a templated ``.json`` file path.
    :param api_version: The version of the api that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "profile_id",
        "report",
        "api_version",
        "gcp_conn_id",
        "delegate_to",
        "impersonation_chain",
    )
    template_ext: Sequence[str] = (".json",)

    def __init__(
        self,
        *,
        profile_id: str,
        report: dict[str, Any],
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.profile_id = profile_id
        self.report = report
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def prepare_template(self) -> None:
        # When the templated value is a path to a .json file, replace it with the parsed content.
        if isinstance(self.report, str) and self.report.endswith(".json"):
            with open(self.report) as fp:
                self.report = json.load(fp)

    def execute(self, context: Context):
        cm_hook = GoogleCampaignManagerHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Inserting Campaign Manager report.")
        inserted = cm_hook.insert_report(profile_id=self.profile_id, report=self.report)
        report_id = inserted.get("id")
        # Expose the new report id so downstream tasks (run/download/delete) can use it.
        self.xcom_push(context, key="report_id", value=report_id)
        self.log.info("Report successfully inserted. Report id: %s", report_id)
        return inserted
class GoogleCampaignManagerRunReportOperator(BaseOperator):
    """
    Runs a report.

    .. seealso::
        Check official API docs:
        `https://developers.google.com/doubleclick-advertisers/rest/v4/reports/run`

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleCampaignManagerRunReportOperator`

    :param profile_id: The DFA profile ID.
    :param report_id: The ID of the report.
    :param synchronous: If set and true, tries to run the report synchronously.
    :param api_version: The version of the api that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "profile_id",
        "report_id",
        "synchronous",
        "api_version",
        "gcp_conn_id",
        "delegate_to",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        profile_id: str,
        report_id: str,
        synchronous: bool = False,
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.profile_id = profile_id
        self.report_id = report_id
        self.synchronous = synchronous
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        cm_hook = GoogleCampaignManagerHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Running report %s", self.report_id)
        run_response = cm_hook.run_report(
            profile_id=self.profile_id,
            report_id=self.report_id,
            synchronous=self.synchronous,
        )
        file_id = run_response.get("id")
        # Expose the report file id so a download task can pick it up.
        self.xcom_push(context, key="file_id", value=file_id)
        self.log.info("Report file id: %s", file_id)
        return run_response
class GoogleCampaignManagerBatchInsertConversionsOperator(BaseOperator):
    """
    Inserts conversions.

    .. seealso::
        Check official API docs:
        `https://developers.google.com/doubleclick-advertisers/rest/v4/conversions/batchinsert`

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleCampaignManagerBatchInsertConversionsOperator`

    :param profile_id: User profile ID associated with this request.
    :param conversions: Conversions to insert, should be type of Conversion:
        https://developers.google.com/doubleclick-advertisers/rest/v4/conversions
    :param encryption_entity_type: The encryption entity type. This should match the encryption
        configuration for ad serving or Data Transfer.
    :param encryption_entity_id: The encryption entity ID. This should match the encryption
        configuration for ad serving or Data Transfer.
    :param encryption_source: Describes whether the encrypted cookie was received from ad serving
        (the %m macro) or from Data Transfer.
    :param max_failed_inserts: The maximum number of conversions that failed to be inserted
    :param api_version: The version of the api that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "profile_id",
        "conversions",
        "encryption_entity_type",
        "encryption_entity_id",
        "encryption_source",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        profile_id: str,
        conversions: list[dict[str, Any]],
        encryption_entity_type: str,
        encryption_entity_id: int,
        encryption_source: str,
        max_failed_inserts: int = 0,
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.profile_id = profile_id
        self.conversions = conversions
        self.encryption_entity_type = encryption_entity_type
        self.encryption_entity_id = encryption_entity_id
        self.encryption_source = encryption_source
        self.max_failed_inserts = max_failed_inserts
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        cm_hook = GoogleCampaignManagerHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        # Delegate the batch insert to the hook and hand the API response back to Airflow.
        return cm_hook.conversions_batch_insert(
            profile_id=self.profile_id,
            conversions=self.conversions,
            encryption_entity_type=self.encryption_entity_type,
            encryption_entity_id=self.encryption_entity_id,
            encryption_source=self.encryption_source,
            max_failed_inserts=self.max_failed_inserts,
        )
class GoogleCampaignManagerBatchUpdateConversionsOperator(BaseOperator):
    """
    Updates existing conversions.

    .. seealso::
        Check official API docs:
        `https://developers.google.com/doubleclick-advertisers/rest/v4/conversions/batchupdate`

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleCampaignManagerBatchUpdateConversionsOperator`

    :param profile_id: User profile ID associated with this request.
    :param conversions: Conversions to update, should be type of Conversion:
        https://developers.google.com/doubleclick-advertisers/rest/v4/conversions
    :param encryption_entity_type: The encryption entity type. This should match the encryption
        configuration for ad serving or Data Transfer.
    :param encryption_entity_id: The encryption entity ID. This should match the encryption
        configuration for ad serving or Data Transfer.
    :param encryption_source: Describes whether the encrypted cookie was received from ad serving
        (the %m macro) or from Data Transfer.
    :param max_failed_updates: The maximum number of conversions that failed to be updated
    :param api_version: The version of the api that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "profile_id",
        "conversions",
        "encryption_entity_type",
        "encryption_entity_id",
        "encryption_source",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        profile_id: str,
        conversions: list[dict[str, Any]],
        encryption_entity_type: str,
        encryption_entity_id: int,
        encryption_source: str,
        max_failed_updates: int = 0,
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.profile_id = profile_id
        self.conversions = conversions
        self.encryption_entity_type = encryption_entity_type
        self.encryption_entity_id = encryption_entity_id
        self.encryption_source = encryption_source
        self.max_failed_updates = max_failed_updates
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        cm_hook = GoogleCampaignManagerHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        # Delegate the batch update to the hook and hand the API response back to Airflow.
        return cm_hook.conversions_batch_update(
            profile_id=self.profile_id,
            conversions=self.conversions,
            encryption_entity_type=self.encryption_entity_type,
            encryption_entity_id=self.encryption_entity_id,
            encryption_source=self.encryption_source,
            max_failed_updates=self.max_failed_updates,
        )
| 24,934 | 40.837248 | 108 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/hooks/analytics.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from googleapiclient.discovery import Resource, build
from googleapiclient.http import MediaFileUpload
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleAnalyticsHook(GoogleBaseHook):
    """Hook for Google Analytics 360 (Management API)."""

    def __init__(self, api_version: str = "v3", *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.api_version = api_version
        self._conn = None

    def _paginate(self, resource: Resource, list_args: dict[str, Any] | None = None) -> list[dict]:
        """Fetch every page of ``resource.list`` results (the API's start_index is 1-based)."""
        args = list_args or {}
        items: list[dict] = []
        while True:
            response = resource.list(start_index=len(items) + 1, **args).execute(
                num_retries=self.num_retries
            )
            items.extend(response.get("items", []))
            # Once we hold as many items as the API reports in total, we are done.
            if response["totalResults"] <= len(items):
                return items

    def get_conn(self) -> Resource:
        """Retrieves connection to Google Analytics 360."""
        if self._conn is None:
            self._conn = build(
                "analytics",
                self.api_version,
                http=self._authorize(),
                cache_discovery=False,
            )
        return self._conn

    def list_accounts(self) -> list[dict[str, Any]]:
        """Lists accounts list from Google Analytics 360."""
        self.log.info("Retrieving accounts list...")
        accounts = self.get_conn().management().accounts()
        return self._paginate(accounts)

    def get_ad_words_link(
        self, account_id: str, web_property_id: str, web_property_ad_words_link_id: str
    ) -> dict[str, Any]:
        """
        Returns a web property-Google Ads link to which the user has access.

        :param account_id: ID of the account which the given web property belongs to.
        :param web_property_id: Web property-Google Ads link UA-string.
        :param web_property_ad_words_link_id: to retrieve the Google Ads link for.
        :returns: web property-Google Ads
        """
        self.log.info("Retrieving ad words links...")
        request = (
            self.get_conn()
            .management()
            .webPropertyAdWordsLinks()
            .get(
                accountId=account_id,
                webPropertyId=web_property_id,
                webPropertyAdWordsLinkId=web_property_ad_words_link_id,
            )
        )
        return request.execute(num_retries=self.num_retries)

    def list_ad_words_links(self, account_id: str, web_property_id: str) -> list[dict[str, Any]]:
        """
        Lists webProperty-Google Ads links for a given web property.

        :param account_id: ID of the account which the given web property belongs to.
        :param web_property_id: Web property UA-string to retrieve the Google Ads links for.
        :returns: list of entity Google Ads links.
        """
        self.log.info("Retrieving ad words list...")
        ads_links = self.get_conn().management().webPropertyAdWordsLinks()
        return self._paginate(ads_links, {"accountId": account_id, "webPropertyId": web_property_id})

    def upload_data(
        self,
        file_location: str,
        account_id: str,
        web_property_id: str,
        custom_data_source_id: str,
        resumable_upload: bool = False,
    ) -> None:
        """
        Uploads file to GA via the Data Import API.

        :param file_location: The path and name of the file to upload.
        :param account_id: The GA account Id to which the data upload belongs.
        :param web_property_id: UA-string associated with the upload.
        :param custom_data_source_id: Custom Data Source Id to which this data import belongs.
        :param resumable_upload: flag to upload the file in a resumable fashion, using a
            series of at least two requests.
        """
        media = MediaFileUpload(
            file_location,
            mimetype="application/octet-stream",
            resumable=resumable_upload,
        )
        self.log.info(
            "Uploading file to GA file for accountId: %s, webPropertyId:%s and customDataSourceId:%s ",
            account_id,
            web_property_id,
            custom_data_source_id,
        )
        uploads = self.get_conn().management().uploads()
        uploads.uploadData(
            accountId=account_id,
            webPropertyId=web_property_id,
            customDataSourceId=custom_data_source_id,
            media_body=media,
        ).execute()

    def delete_upload_data(
        self,
        account_id: str,
        web_property_id: str,
        custom_data_source_id: str,
        delete_request_body: dict[str, Any],
    ) -> None:
        """
        Deletes the uploaded data for a given account/property/dataset.

        :param account_id: The GA account Id to which the data upload belongs.
        :param web_property_id: UA-string associated with the upload.
        :param custom_data_source_id: Custom Data Source Id to which this data import belongs.
        :param delete_request_body: Dict of customDataImportUids to delete.
        """
        self.log.info(
            "Deleting previous uploads to GA file for accountId:%s, "
            "webPropertyId:%s and customDataSourceId:%s ",
            account_id,
            web_property_id,
            custom_data_source_id,
        )
        uploads = self.get_conn().management().uploads()
        uploads.deleteUploadData(
            accountId=account_id,
            webPropertyId=web_property_id,
            customDataSourceId=custom_data_source_id,
            body=delete_request_body,
        ).execute()

    def list_uploads(self, account_id, web_property_id, custom_data_source_id) -> list[dict[str, Any]]:
        """
        Get list of data upload from GA.

        :param account_id: The GA account Id to which the data upload belongs.
        :param web_property_id: UA-string associated with the upload.
        :param custom_data_source_id: Custom Data Source Id to which this data import belongs.
        """
        self.log.info(
            "Getting list of uploads for accountId:%s, webPropertyId:%s and customDataSourceId:%s ",
            account_id,
            web_property_id,
            custom_data_source_id,
        )
        uploads = self.get_conn().management().uploads()
        return self._paginate(
            uploads,
            {
                "accountId": account_id,
                "webPropertyId": web_property_id,
                "customDataSourceId": custom_data_source_id,
            },
        )
| 7,794 | 37.210784 | 103 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/hooks/display_video.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google DisplayVideo hook."""
from __future__ import annotations
from typing import Any, Sequence
from googleapiclient.discovery import Resource, build
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleDisplayVideo360Hook(GoogleBaseHook):
    """Hook for Google Display & Video 360."""

    _conn: Resource | None = None

    def __init__(
        self,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            delegate_to=delegate_to,
            impersonation_chain=impersonation_chain,
        )
        self.api_version = api_version

    def get_conn(self) -> Resource:
        """Retrieves connection to DisplayVideo (Bid Manager API client)."""
        # NOTE(review): this getter and get_conn_to_display_video share the same ``_conn``
        # cache, so whichever is called first wins — confirm callers never need both
        # services on a single hook instance.
        if self._conn is None:
            self._conn = build(
                "doubleclickbidmanager",
                self.api_version,
                http=self._authorize(),
                cache_discovery=False,
            )
        return self._conn

    def get_conn_to_display_video(self) -> Resource:
        """Retrieves connection to DisplayVideo (Display & Video 360 API client)."""
        if self._conn is None:
            self._conn = build(
                "displayvideo",
                self.api_version,
                http=self._authorize(),
                cache_discovery=False,
            )
        return self._conn

    @staticmethod
    def erf_uri(partner_id, entity_type) -> list[str]:
        """
        Return URI for all Entity Read Files in bucket.

        For example, if you were generating a file name to retrieve the entity read file
        for partner 123 accessing the line_item table from April 2, 2013, your filename
        would look something like this:
        gdbm-123/entity/20130402.0.LineItem.json

        More information:
        https://developers.google.com/bid-manager/guides/entity-read/overview

        :param partner_id: The numeric ID of your Partner.
        :param entity_type: The type of file Partner, Advertiser, InsertionOrder,
            LineItem, Creative, Pixel, InventorySource, UserList, UniversalChannel, and summary.
        """
        # ``{{ ds_nodash }}`` is left literal here so Airflow templating fills in the date.
        return ["gdbm-%s/entity/{{ ds_nodash }}.*.%s.json" % (partner_id, entity_type)]

    def create_query(self, query: dict[str, Any]) -> dict:
        """
        Creates a query.

        :param query: Query object to be passed to request body.
        """
        return self.get_conn().queries().create(body=query).execute(num_retries=self.num_retries)

    def delete_query(self, query_id: str) -> None:
        """
        Deletes a stored query as well as the associated stored reports.

        :param query_id: Query ID to delete.
        """
        self.get_conn().queries().delete(queryId=query_id).execute(num_retries=self.num_retries)

    def get_query(self, query_id: str) -> dict:
        """
        Retrieves a stored query.

        :param query_id: Query ID to retrieve.
        """
        return self.get_conn().queries().get(queryId=query_id).execute(num_retries=self.num_retries)

    def list_queries(self) -> list[dict]:
        """Retrieves stored queries."""
        listed = self.get_conn().queries().list().execute(num_retries=self.num_retries)
        return listed.get("queries", [])

    def run_query(self, query_id: str, params: dict[str, Any] | None) -> dict:
        """
        Runs a stored query to generate a report.

        :param query_id: Query ID to run.
        :param params: Parameters for the report.
        """
        request = self.get_conn().queries().run(queryId=query_id, body=params)
        return request.execute(num_retries=self.num_retries)

    def get_report(self, query_id: str, report_id: str) -> dict:
        """
        Retrieves a report.

        :param query_id: Query ID for which report was generated.
        :param report_id: Report ID to retrieve.
        """
        reports = self.get_conn().queries().reports()
        return reports.get(queryId=query_id, reportId=report_id).execute(num_retries=self.num_retries)

    def upload_line_items(self, line_items: Any) -> list[dict[str, Any]]:
        """
        Uploads line items in CSV format.

        :param line_items: downloaded data from GCS and passed to the body request
        :return: response body.
        """
        body = {
            "lineItems": line_items,
            "dryRun": False,
            "format": "CSV",
        }
        return (
            self.get_conn().lineitems().uploadlineitems(body=body).execute(num_retries=self.num_retries)
        )

    def download_line_items(self, request_body: dict[str, Any]) -> list[Any]:
        """
        Retrieves line items in CSV format.

        :param request_body: dictionary with parameters that should be passed into.
            More information about it can be found here:
            https://developers.google.com/bid-manager/v1.1/lineitems/downloadlineitems
        """
        response = (
            self.get_conn()
            .lineitems()
            .downloadlineitems(body=request_body)
            .execute(num_retries=self.num_retries)
        )
        return response["lineItems"]

    def create_sdf_download_operation(self, body_request: dict[str, Any]) -> dict[str, Any]:
        """
        Creates an SDF Download Task and Returns an Operation.

        :param body_request: Body request.
            More information about body request n be found here:
            https://developers.google.com/display-video/api/reference/rest/v1/sdfdownloadtasks/create
        """
        tasks = self.get_conn_to_display_video().sdfdownloadtasks()
        return tasks.create(body=body_request).execute(num_retries=self.num_retries)

    def get_sdf_download_operation(self, operation_name: str):
        """
        Gets the latest state of an asynchronous SDF download task operation.

        :param operation_name: The name of the operation resource.
        """
        operations = self.get_conn_to_display_video().sdfdownloadtasks().operations()
        return operations.get(name=operation_name).execute(num_retries=self.num_retries)

    def download_media(self, resource_name: str):
        """
        Downloads media.

        :param resource_name: of the media that is being downloaded.
        """
        return self.get_conn_to_display_video().media().download_media(resourceName=resource_name)
| 7,730 | 33.513393 | 110 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/hooks/search_ads.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Search Ads 360 hook."""
from __future__ import annotations
from typing import Any, Sequence
from googleapiclient.discovery import build
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleSearchAdsHook(GoogleBaseHook):
    """Hook for Google Search Ads 360."""

    _conn: build | None = None

    def __init__(
        self,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            delegate_to=delegate_to,
            impersonation_chain=impersonation_chain,
        )
        self.api_version = api_version

    def get_conn(self):
        """Retrieves connection to Google SearchAds."""
        if self._conn is None:
            self._conn = build(
                "doubleclicksearch",
                self.api_version,
                http=self._authorize(),
                cache_discovery=False,
            )
        return self._conn

    def insert_report(self, report: dict[str, Any]) -> Any:
        """
        Inserts a report request into the reporting system.

        :param report: Report to be generated.
        """
        return self.get_conn().reports().request(body=report).execute(num_retries=self.num_retries)

    def get(self, report_id: str) -> Any:
        """
        Polls for the status of a report request.

        :param report_id: ID of the report request being polled.
        """
        return self.get_conn().reports().get(reportId=report_id).execute(num_retries=self.num_retries)

    def get_file(self, report_fragment: int, report_id: str) -> Any:
        """
        Downloads a report file encoded in UTF-8.

        :param report_fragment: The index of the report fragment to download.
        :param report_id: ID of the report.
        """
        request = self.get_conn().reports().getFile(reportFragment=report_fragment, reportId=report_id)
        return request.execute(num_retries=self.num_retries)
| 3,099 | 33.065934 | 106 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/hooks/campaign_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Campaign Manager hook."""
from __future__ import annotations
from typing import Any, Sequence
from googleapiclient import http
from googleapiclient.discovery import Resource, build
from airflow.exceptions import AirflowException
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleCampaignManagerHook(GoogleBaseHook):
    """Hook for Google Campaign Manager.

    Thin wrapper around the ``dfareporting`` Google API client exposing report
    management (insert, run, patch, download, delete) and offline conversion
    batch uploads.

    :param api_version: The version of the API that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation
        of authority, if any.
    :param impersonation_chain: Optional service account to impersonate using
        short-term credentials, or chained list of accounts required to get the
        access_token of the last account in the list, which will be impersonated
        in the request.
    """

    # Lazily-built googleapiclient Resource, cached for the lifetime of the hook.
    _conn: Resource | None = None

    def __init__(
        self,
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            delegate_to=delegate_to,
            impersonation_chain=impersonation_chain,
        )
        self.api_version = api_version

    def get_conn(self) -> Resource:
        """Retrieves connection to Campaign Manager, building it on first use."""
        if not self._conn:
            http_authorized = self._authorize()
            self._conn = build(
                "dfareporting",
                self.api_version,
                http=http_authorized,
                # Discovery-document caching is disabled for this client.
                cache_discovery=False,
            )
        return self._conn

    def delete_report(self, profile_id: str, report_id: str) -> Any:
        """
        Deletes a report by its ID.

        :param profile_id: The DFA user profile ID.
        :param report_id: The ID of the report.
        :return: The API response for the delete call.
        """
        response = (
            self.get_conn()
            .reports()
            .delete(profileId=profile_id, reportId=report_id)
            .execute(num_retries=self.num_retries)
        )
        return response

    def insert_report(self, profile_id: str, report: dict[str, Any]) -> Any:
        """
        Creates a report.

        :param profile_id: The DFA user profile ID.
        :param report: The report resource to be inserted.
        :return: The created report resource as returned by the API.
        """
        response = (
            self.get_conn()
            .reports()
            .insert(profileId=profile_id, body=report)
            .execute(num_retries=self.num_retries)
        )
        return response

    def list_reports(
        self,
        profile_id: str,
        max_results: int | None = None,
        scope: str | None = None,
        sort_field: str | None = None,
        sort_order: str | None = None,
    ) -> list[dict]:
        """
        Retrieves the full list of reports, following pagination.

        :param profile_id: The DFA user profile ID.
        :param max_results: Maximum number of results to return per page.
        :param scope: The scope that defines which results are returned.
        :param sort_field: The field by which to sort the list.
        :param sort_order: Order of sorted results.
        :return: All report resources accumulated across pages.
        """
        reports: list[dict] = []
        conn = self.get_conn()
        request = conn.reports().list(
            profileId=profile_id,
            maxResults=max_results,
            scope=scope,
            sortField=sort_field,
            sortOrder=sort_order,
        )
        # Page through results; list_next() returns None after the last page.
        while request is not None:
            response = request.execute(num_retries=self.num_retries)
            reports.extend(response.get("items", []))
            request = conn.reports().list_next(previous_request=request, previous_response=response)
        return reports

    def patch_report(self, profile_id: str, report_id: str, update_mask: dict) -> Any:
        """
        Updates a report. This method supports patch semantics.

        :param profile_id: The DFA user profile ID.
        :param report_id: The ID of the report.
        :param update_mask: The relevant portions of a report resource,
            according to the rules of patch semantics.
        :return: The patched report resource as returned by the API.
        """
        response = (
            self.get_conn()
            .reports()
            .patch(profileId=profile_id, reportId=report_id, body=update_mask)
            .execute(num_retries=self.num_retries)
        )
        return response

    def run_report(self, profile_id: str, report_id: str, synchronous: bool | None = None) -> Any:
        """
        Runs a report.

        :param profile_id: The DFA profile ID.
        :param report_id: The ID of the report.
        :param synchronous: If set and true, tries to run the report synchronously.
        :return: The report file resource produced by the run.
        """
        response = (
            self.get_conn()
            .reports()
            .run(profileId=profile_id, reportId=report_id, synchronous=synchronous)
            .execute(num_retries=self.num_retries)
        )
        return response

    def update_report(self, profile_id: str, report_id: str) -> Any:
        """
        Updates a report.

        :param profile_id: The DFA user profile ID.
        :param report_id: The ID of the report.
        :return: The updated report resource as returned by the API.
        """
        response = (
            self.get_conn()
            .reports()
            .update(profileId=profile_id, reportId=report_id)
            .execute(num_retries=self.num_retries)
        )
        return response

    def get_report(self, file_id: str, profile_id: str, report_id: str) -> Any:
        """
        Retrieves a report file's metadata (including its processing status).

        :param profile_id: The DFA user profile ID.
        :param report_id: The ID of the report.
        :param file_id: The ID of the report file.
        :return: The report file resource as returned by the API.
        """
        response = (
            self.get_conn()
            .reports()
            .files()
            .get(fileId=file_id, profileId=profile_id, reportId=report_id)
            .execute(num_retries=self.num_retries)
        )
        return response

    def get_report_file(self, file_id: str, profile_id: str, report_id: str) -> http.HttpRequest:
        """
        Retrieves a media part of a report file.

        Note: the request is returned *unexecuted* so the caller can stream the
        media content (e.g. with a downloader).

        :param profile_id: The DFA user profile ID.
        :param report_id: The ID of the report.
        :param file_id: The ID of the report file.
        :return: googleapiclient.http.HttpRequest
        """
        request = (
            self.get_conn()
            .reports()
            .files()
            .get_media(fileId=file_id, profileId=profile_id, reportId=report_id)
        )
        return request

    @staticmethod
    def _conversions_batch_request(
        conversions: list[dict[str, Any]],
        encryption_entity_type: str,
        encryption_entity_id: int,
        encryption_source: str,
        kind: str,
    ) -> dict[str, Any]:
        """Build the request body shared by batchinsert and batchupdate calls."""
        return {
            "kind": kind,
            "conversions": conversions,
            "encryptionInfo": {
                "kind": "dfareporting#encryptionInfo",
                "encryptionEntityType": encryption_entity_type,
                "encryptionEntityId": encryption_entity_id,
                "encryptionSource": encryption_source,
            },
        }

    def conversions_batch_insert(
        self,
        profile_id: str,
        conversions: list[dict[str, Any]],
        encryption_entity_type: str,
        encryption_entity_id: int,
        encryption_source: str,
        max_failed_inserts: int = 0,
    ) -> Any:
        """
        Inserts conversions.

        :param profile_id: User profile ID associated with this request.
        :param conversions: Conversions to insert; each should be of type Conversion:
            https://developers.google.com/doubleclick-advertisers/rest/v4/conversions/batchinsert
        :param encryption_entity_type: The encryption entity type. This should match the encryption
            configuration for ad serving or Data Transfer.
        :param encryption_entity_id: The encryption entity ID. This should match the encryption
            configuration for ad serving or Data Transfer.
        :param encryption_source: Describes whether the encrypted cookie was received from ad serving
            (the %m macro) or from Data Transfer.
        :param max_failed_inserts: The maximum number of failed conversion inserts tolerated
            before an AirflowException is raised.
        :raises AirflowException: when more conversions failed than ``max_failed_inserts``.
        :return: The batch-insert API response.
        """
        response = (
            self.get_conn()
            .conversions()
            .batchinsert(
                profileId=profile_id,
                body=self._conversions_batch_request(
                    conversions=conversions,
                    encryption_entity_type=encryption_entity_type,
                    encryption_entity_id=encryption_entity_id,
                    encryption_source=encryption_source,
                    kind="dfareporting#conversionsBatchInsertRequest",
                ),
            )
            .execute(num_retries=self.num_retries)
        )
        if response.get("hasFailures", False):
            # Collect error details from the per-conversion status entries that
            # reported errors, and fail only past the configured tolerance.
            errored_conversions = [stat["errors"] for stat in response["status"] if "errors" in stat]
            if len(errored_conversions) > max_failed_inserts:
                raise AirflowException(errored_conversions)
        return response

    def conversions_batch_update(
        self,
        profile_id: str,
        conversions: list[dict[str, Any]],
        encryption_entity_type: str,
        encryption_entity_id: int,
        encryption_source: str,
        max_failed_updates: int = 0,
    ) -> Any:
        """
        Updates existing conversions.

        :param profile_id: User profile ID associated with this request.
        :param conversions: Conversions to update; each should be of type Conversion:
            https://developers.google.com/doubleclick-advertisers/rest/v4/conversions/batchupdate
        :param encryption_entity_type: The encryption entity type. This should match the encryption
            configuration for ad serving or Data Transfer.
        :param encryption_entity_id: The encryption entity ID. This should match the encryption
            configuration for ad serving or Data Transfer.
        :param encryption_source: Describes whether the encrypted cookie was received from ad serving
            (the %m macro) or from Data Transfer.
        :param max_failed_updates: The maximum number of failed conversion updates tolerated
            before an AirflowException is raised.
        :raises AirflowException: when more conversions failed than ``max_failed_updates``.
        :return: The batch-update API response.
        """
        response = (
            self.get_conn()
            .conversions()
            .batchupdate(
                profileId=profile_id,
                body=self._conversions_batch_request(
                    conversions=conversions,
                    encryption_entity_type=encryption_entity_type,
                    encryption_entity_id=encryption_entity_id,
                    encryption_source=encryption_source,
                    kind="dfareporting#conversionsBatchUpdateRequest",
                ),
            )
            .execute(num_retries=self.num_retries)
        )
        if response.get("hasFailures", False):
            # Collect error details from the per-conversion status entries that
            # reported errors, and fail only past the configured tolerance.
            errored_conversions = [stat["errors"] for stat in response["status"] if "errors" in stat]
            if len(errored_conversions) > max_failed_updates:
                raise AirflowException(errored_conversions)
        return response
| 11,597 | 36.173077 | 101 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/sensors/display_video.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Sensor for detecting the completion of DV360 reports."""
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow import AirflowException
from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleDisplayVideo360GetSDFDownloadOperationSensor(BaseSensorOperator):
    """
    Waits until a DV360 Structured Data File (SDF) download operation completes.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360GetSDFDownloadOperationSensor`

    :param operation_name: The name of the operation resource to poll.
    :param api_version: The version of the api that will be requested for example 'v1'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("operation_name", "impersonation_chain")

    def __init__(
        self,
        operation_name: str,
        api_version: str = "v1",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        mode: str = "reschedule",
        poke_interval: int = 60 * 5,
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.operation_name = operation_name
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Sensor scheduling knobs are applied after the base initializer.
        self.mode = mode
        self.poke_interval = poke_interval

    def poke(self, context: Context) -> bool:
        """Check the operation once; True when it reports completion."""
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        operation = hook.get_sdf_download_operation(operation_name=self.operation_name)
        # A terminal failure surfaces as an "error" field on the operation.
        if "error" in operation:
            raise AirflowException(f'The operation finished in error with {operation["error"]}')
        return bool(operation and operation.get("done"))
class GoogleDisplayVideo360RunQuerySensor(BaseSensorOperator):
    """
    Sensor for detecting the completion of DV360 reports for API v2.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleDisplayVideo360RunQuerySensor`

    :param query_id: Query ID for which report was generated
    :param report_id: Report ID for which you want to wait
    :param api_version: The version of the api that will be requested for example 'v2'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "query_id",
        "report_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        query_id: str,
        report_id: str,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.query_id = query_id
        self.report_id = report_id
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def poke(self, context: Context) -> bool:
        """Return True once the report reaches a terminal state (DONE or FAILED)."""
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        response = hook.get_report(query_id=self.query_id, report_id=self.report_id)
        status = response.get("metadata", {}).get("status", {}).get("state")
        # Use lazy %-style logging arguments instead of an f-string so the
        # message is only interpolated when the record is actually emitted.
        self.log.info("Status of the report %s for query %s is %s", self.report_id, self.query_id, status)
        if response and status in ["DONE", "FAILED"]:
            return True
        return False
| 6,805 | 41.273292 | 101 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/sensors/search_ads.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Search Ads sensor."""
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.providers.google.marketing_platform.hooks.search_ads import GoogleSearchAdsHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleSearchAdsReportSensor(BaseSensorOperator):
    """
    Polls for the status of a report request.

    .. seealso::
        For API documentation check:
        https://developers.google.com/search-ads/v2/reference/reports/get

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleSearchAdsReportSensor`

    :param report_id: ID of the report request being polled.
    :param api_version: The version of the api that will be requested for example 'v2'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("report_id", "impersonation_chain")

    def __init__(
        self,
        *,
        report_id: str,
        api_version: str = "v2",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        mode: str = "reschedule",
        poke_interval: int = 5 * 60,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        # Scheduling knobs are forwarded straight to the base sensor.
        super().__init__(mode=mode, poke_interval=poke_interval, **kwargs)
        self.impersonation_chain = impersonation_chain
        self.delegate_to = delegate_to
        self.gcp_conn_id = gcp_conn_id
        self.api_version = api_version
        self.report_id = report_id

    def poke(self, context: Context):
        """Return whether the report identified by ``report_id`` is ready."""
        hook = GoogleSearchAdsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Checking status of %s report.", self.report_id)
        report = hook.get(report_id=self.report_id)
        return report["isReportReady"]
| 3,795 | 40.26087 | 93 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/sensors/campaign_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Campaign Manager sensor."""
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.providers.google.marketing_platform.hooks.campaign_manager import GoogleCampaignManagerHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleCampaignManagerReportSensor(BaseSensorOperator):
    """
    Check if report is ready.

    .. seealso::
        Check official API docs:
        https://developers.google.com/doubleclick-advertisers/rest/v4/reports/get

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleCampaignManagerReportSensor`

    :param profile_id: The DFA user profile ID.
    :param report_id: The ID of the report.
    :param file_id: The ID of the report file.
    :param api_version: The version of the api that will be requested, for example 'v4'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("profile_id", "report_id", "file_id", "impersonation_chain")

    def __init__(
        self,
        *,
        profile_id: str,
        report_id: str,
        file_id: str,
        api_version: str = "v4",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        mode: str = "reschedule",
        poke_interval: int = 60 * 5,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Sensor scheduling knobs are applied after the base initializer.
        self.mode = mode
        self.poke_interval = poke_interval
        self.profile_id = profile_id
        self.report_id = report_id
        self.file_id = file_id
        self.api_version = api_version
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def poke(self, context: Context) -> bool:
        """Return True once the report file is no longer being processed."""
        hook = GoogleCampaignManagerHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        response = hook.get_report(profile_id=self.profile_id, report_id=self.report_id, file_id=self.file_id)
        self.log.info("Report status: %s", response["status"])
        return response["status"] not in {"PROCESSING", "QUEUED"}
| 4,164 | 39.833333 | 110 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/example_dags/example_display_video.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that shows how to use DisplayVideo.
"""
from __future__ import annotations
import os
from datetime import datetime
from typing import cast
from airflow import models
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook
from airflow.providers.google.marketing_platform.operators.display_video import (
GoogleDisplayVideo360CreateQueryOperator,
GoogleDisplayVideo360CreateSDFDownloadTaskOperator,
GoogleDisplayVideo360DeleteReportOperator,
GoogleDisplayVideo360DownloadLineItemsOperator,
GoogleDisplayVideo360DownloadReportV2Operator,
GoogleDisplayVideo360RunQueryOperator,
GoogleDisplayVideo360SDFtoGCSOperator,
GoogleDisplayVideo360UploadLineItemsOperator,
)
from airflow.providers.google.marketing_platform.sensors.display_video import (
GoogleDisplayVideo360GetSDFDownloadOperationSensor,
GoogleDisplayVideo360RunQuerySensor,
)
# Configuration for the example DAGs below is sourced from environment
# variables so the same examples can run against different GCP projects
# without code changes.
# NOTE(review): the fallback defaults for ADVERTISER_ID and GMP_PARTNER_ID are
# ints while values read from the environment are strings — confirm downstream
# consumers accept both.
# [START howto_display_video_env_variables]
BUCKET = os.environ.get("GMP_DISPLAY_VIDEO_BUCKET", "gs://INVALID BUCKET NAME")
ADVERTISER_ID = os.environ.get("GMP_ADVERTISER_ID", 1234567)
OBJECT_NAME = os.environ.get("GMP_OBJECT_NAME", "files/report.csv")
PATH_TO_UPLOAD_FILE = os.environ.get("GCP_GCS_PATH_TO_UPLOAD_FILE", "test-gcs-example.txt")
PATH_TO_SAVED_FILE = os.environ.get("GCP_GCS_PATH_TO_SAVED_FILE", "test-gcs-example-download.txt")
# Basename of the upload path, used as the object name inside the bucket.
BUCKET_FILE_LOCATION = PATH_TO_UPLOAD_FILE.rpartition("/")[-1]
SDF_VERSION = os.environ.get("GMP_SDF_VERSION", "SDF_VERSION_5_5")
BQ_DATA_SET = os.environ.get("GMP_BQ_DATA_SET", "airflow_test")
GMP_PARTNER_ID = os.environ.get("GMP_PARTNER_ID", 123)
ENTITY_TYPE = os.environ.get("GMP_ENTITY_TYPE", "LineItem")
ERF_SOURCE_OBJECT = GoogleDisplayVideo360Hook.erf_uri(GMP_PARTNER_ID, ENTITY_TYPE)
# One-time CSV report over the last 7 days, grouped by date and partner.
REPORT_V2 = {
    "metadata": {
        "title": "Airflow Test Report",
        "dataRange": {"range": "LAST_7_DAYS"},
        "format": "CSV",
        "sendNotification": False,
    },
    "params": {
        "type": "STANDARD",
        "groupBys": ["FILTER_DATE", "FILTER_PARTNER"],
        "filters": [{"type": "FILTER_PARTNER", "value": ADVERTISER_ID}],
        "metrics": ["METRIC_IMPRESSIONS", "METRIC_CLICKS"],
    },
    "schedule": {"frequency": "ONE_TIME"},
}
# Run-time parameters applied when the query above is executed.
PARAMETERS = {
    "dataRange": {"range": "LAST_7_DAYS"},
}
CREATE_SDF_DOWNLOAD_TASK_BODY_REQUEST: dict = {
    "version": SDF_VERSION,
    "advertiserId": ADVERTISER_ID,
    "inventorySourceFilter": {"inventorySourceIds": []},
}
DOWNLOAD_LINE_ITEMS_REQUEST: dict = {"filterType": ADVERTISER_ID, "format": "CSV", "fileSpec": "EWF"}
# [END howto_display_video_env_variables]
START_DATE = datetime(2021, 1, 1)
# Miscellaneous DV360 examples: loading Entity Read Files into BigQuery and
# downloading/uploading line items via GCS. No dependencies are declared
# between the three tasks, so they run independently.
with models.DAG(
    "example_display_video_misc",
    start_date=START_DATE,
    catchup=False,
) as dag2:
    # [START howto_google_display_video_upload_multiple_entity_read_files_to_big_query]
    upload_erf_to_bq = GCSToBigQueryOperator(
        task_id="upload_erf_to_bq",
        bucket=BUCKET,
        source_objects=ERF_SOURCE_OBJECT,
        destination_project_dataset_table=f"{BQ_DATA_SET}.gcs_to_bq_table",
        write_disposition="WRITE_TRUNCATE",
    )
    # [END howto_google_display_video_upload_multiple_entity_read_files_to_big_query]
    # [START howto_google_display_video_download_line_items_operator]
    download_line_items = GoogleDisplayVideo360DownloadLineItemsOperator(
        task_id="download_line_items",
        request_body=DOWNLOAD_LINE_ITEMS_REQUEST,
        bucket_name=BUCKET,
        object_name=OBJECT_NAME,
        gzip=False,
    )
    # [END howto_google_display_video_download_line_items_operator]
    # [START howto_google_display_video_upload_line_items_operator]
    upload_line_items = GoogleDisplayVideo360UploadLineItemsOperator(
        task_id="upload_line_items",
        bucket_name=BUCKET,
        object_name=BUCKET_FILE_LOCATION,
    )
    # [END howto_google_display_video_upload_line_items_operator]
# SDF pipeline: create an SDF download task, wait for the long-running
# operation to finish, copy the resulting file to GCS, then load it into
# BigQuery.
with models.DAG(
    "example_display_video_sdf",
    start_date=START_DATE,
    catchup=False,
) as dag3:
    # [START howto_google_display_video_create_sdf_download_task_operator]
    create_sdf_download_task = GoogleDisplayVideo360CreateSDFDownloadTaskOperator(
        task_id="create_sdf_download_task", body_request=CREATE_SDF_DOWNLOAD_TASK_BODY_REQUEST
    )
    # The operation name is pulled from the upstream task's XCom at runtime.
    operation_name = '{{ task_instance.xcom_pull("create_sdf_download_task")["name"] }}'
    # [END howto_google_display_video_create_sdf_download_task_operator]
    # [START howto_google_display_video_wait_for_operation_sensor]
    wait_for_operation = GoogleDisplayVideo360GetSDFDownloadOperationSensor(
        task_id="wait_for_operation",
        operation_name=operation_name,
    )
    # [END howto_google_display_video_wait_for_operation_sensor]
    # [START howto_google_display_video_save_sdf_in_gcs_operator]
    save_sdf_in_gcs = GoogleDisplayVideo360SDFtoGCSOperator(
        task_id="save_sdf_in_gcs",
        operation_name=operation_name,
        bucket_name=BUCKET,
        object_name=BUCKET_FILE_LOCATION,
        gzip=False,
    )
    # [END howto_google_display_video_save_sdf_in_gcs_operator]
    # [START howto_google_display_video_gcs_to_big_query_operator]
    upload_sdf_to_big_query = GCSToBigQueryOperator(
        task_id="upload_sdf_to_big_query",
        bucket=BUCKET,
        source_objects=[save_sdf_in_gcs.output],
        destination_project_dataset_table=f"{BQ_DATA_SET}.gcs_to_bq_table",
        schema_fields=[
            {"name": "name", "type": "STRING", "mode": "NULLABLE"},
            {"name": "post_abbr", "type": "STRING", "mode": "NULLABLE"},
        ],
        write_disposition="WRITE_TRUNCATE",
    )
    # [END howto_google_display_video_gcs_to_big_query_operator]
    create_sdf_download_task >> wait_for_operation >> save_sdf_in_gcs
    # Task dependency created via `XComArgs`:
    # save_sdf_in_gcs >> upload_sdf_to_big_query
# v2 reporting pipeline: create a query, run it, wait for the report,
# download it to GCS, and finally delete the report.
with models.DAG(
    "example_display_video_v2",
    start_date=START_DATE,
    catchup=False,
) as dag:
    # [START howto_google_display_video_create_query_operator]
    create_query_v2 = GoogleDisplayVideo360CreateQueryOperator(body=REPORT_V2, task_id="create_query")
    # XComArg lazily resolves the query_id produced by create_query at runtime.
    query_id = cast(str, XComArg(create_query_v2, key="query_id"))
    # [END howto_google_display_video_create_query_operator]
    # [START howto_google_display_video_run_query_report_operator]
    run_query_v2 = GoogleDisplayVideo360RunQueryOperator(
        query_id=query_id, parameters=PARAMETERS, task_id="run_report"
    )
    # query_id is rebound to the run task's XCom so downstream tasks depend on it.
    query_id = cast(str, XComArg(run_query_v2, key="query_id"))
    report_id = cast(str, XComArg(run_query_v2, key="report_id"))
    # [END howto_google_display_video_run_query_report_operator]
    # [START howto_google_display_video_wait_run_query_sensor]
    wait_for_query = GoogleDisplayVideo360RunQuerySensor(
        task_id="wait_for_query",
        query_id=query_id,
        report_id=report_id,
    )
    # [END howto_google_display_video_wait_run_query_sensor]
    # [START howto_google_display_video_get_report_operator]
    get_report_v2 = GoogleDisplayVideo360DownloadReportV2Operator(
        query_id=query_id,
        report_id=report_id,
        task_id="get_report",
        bucket_name=BUCKET,
        report_name="test1.csv",
    )
    # # [END howto_google_display_video_get_report_operator]
    # # [START howto_google_display_video_delete_query_report_operator]
    delete_report_v2 = GoogleDisplayVideo360DeleteReportOperator(report_id=report_id, task_id="delete_report")
    # # [END howto_google_display_video_delete_query_report_operator]
    create_query_v2 >> run_query_v2 >> wait_for_query >> get_report_v2 >> delete_report_v2
| 8,573 | 39.253521 | 110 | py |
airflow | airflow-main/airflow/providers/google/marketing_platform/example_dags/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/suite/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/google/suite/transfers/sql_to_sheets.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import logging
import numbers
from contextlib import closing
from typing import Any, Iterable, Mapping, Sequence
from airflow.providers.common.sql.operators.sql import BaseSQLOperator
from airflow.providers.google.suite.hooks.sheets import GSheetsHook
class SQLToGoogleSheetsOperator(BaseSQLOperator):
    """
    Copy data from SQL results to provided Google Spreadsheet.

    The query result is written starting at ``spreadsheet_range``; the first
    row written is the list of column names taken from the cursor description.

    :param sql: The SQL to execute.
    :param spreadsheet_id: The Google Sheet ID to interact with.
    :param sql_conn_id: the connection ID used to connect to the database.
    :param parameters: The parameters to render the SQL query with.
    :param database: name of database which overwrite the defined one in connection
    :param spreadsheet_range: The A1 notation of the values to retrieve.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    template_fields: Sequence[str] = (
        "sql",
        "spreadsheet_id",
        "spreadsheet_range",
        "impersonation_chain",
    )
    template_fields_renderers = {"sql": "sql"}
    template_ext: Sequence[str] = (".sql",)
    ui_color = "#a0e08c"
    def __init__(
        self,
        *,
        sql: str,
        spreadsheet_id: str,
        sql_conn_id: str,
        parameters: Iterable | Mapping | None = None,
        database: str | None = None,
        spreadsheet_range: str = "Sheet1",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.sql = sql
        # Stored as ``conn_id`` so the BaseSQLOperator machinery (get_db_hook) picks it up.
        self.conn_id = sql_conn_id
        self.database = database
        self.parameters = parameters
        self.gcp_conn_id = gcp_conn_id
        self.spreadsheet_id = spreadsheet_id
        self.spreadsheet_range = spreadsheet_range
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
    def _data_prep(self, data):
        # Normalize DB-API row values into cell values the Sheets API accepts:
        # dates/datetimes -> ISO 8601 strings, non-int numerics -> float.
        # Yields one list per input row (lazy generator).
        for row in data:
            item_list = []
            for item in row:
                if isinstance(item, (datetime.date, datetime.datetime)):
                    item = item.isoformat()
                elif isinstance(item, int):  # To exclude int from the number check.
                    pass
                elif isinstance(item, numbers.Number):
                    item = float(item)
                item_list.append(item)
            yield item_list
    def _get_data(self):
        # Generator: first yields the header row (column names from the cursor
        # description), then the converted data rows. The connection and cursor
        # stay open until the generator is exhausted (see ``closing`` context).
        hook = self.get_db_hook()
        with closing(hook.get_conn()) as conn, closing(conn.cursor()) as cur:
            self.log.info("Executing query")
            cur.execute(self.sql, self.parameters or ())
            yield [field[0] for field in cur.description]
            yield from self._data_prep(cur.fetchall())
    def execute(self, context: Any) -> None:
        self.log.info("Getting data")
        # Materialize the full result set (header + rows) before uploading.
        values = list(self._get_data())
        self.log.info("Connecting to Google")
        sheet_hook = GSheetsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        # Only build the URL string when INFO logging is actually enabled.
        if self.log.isEnabledFor(logging.INFO):
            url = f"https://docs.google.com/spreadsheets/d/{self.spreadsheet_id}"
            self.log.info("Uploading data to %s", url)
        sheet_hook.update_values(
            spreadsheet_id=self.spreadsheet_id,
            range_=self.spreadsheet_range,
            values=values,
        )
| 5,210 | 38.180451 | 93 | py |
airflow | airflow-main/airflow/providers/google/suite/transfers/gcs_to_gdrive.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a Google Cloud Storage to Google Drive transfer operator."""
from __future__ import annotations
import tempfile
from typing import TYPE_CHECKING, Sequence
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.suite.hooks.drive import GoogleDriveHook
if TYPE_CHECKING:
from airflow.utils.context import Context
WILDCARD = "*"
class GCSToGoogleDriveOperator(BaseOperator):
    """
    Copies objects from a Google Cloud Storage service to a Google Drive service, with renaming if requested.
    Using this operator requires the following OAuth 2.0 scope:
    .. code-block:: none
        https://www.googleapis.com/auth/drive
    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GCSToGoogleDriveOperator`
    :param source_bucket: The source Google Cloud Storage bucket where the object is. (templated)
    :param source_object: The source name of the object to copy in the Google cloud
        storage bucket. (templated)
        You can use only one wildcard for objects (filenames) within your bucket. The wildcard can appear
        inside the object name or at the end of the object name. Appending a wildcard to the bucket name
        is unsupported.
    :param destination_object: The destination name of the object in the destination Google Drive
        service. (templated)
        If a wildcard is supplied in the source_object argument, this is the prefix that will be prepended
        to the final destination objects' paths.
        Note that the source path's part before the wildcard will be removed;
        if it needs to be retained it should be appended to destination_object.
        For example, with prefix ``foo/*`` and destination_object ``blah/``, the file ``foo/baz`` will be
        copied to ``blah/baz``; to retain the prefix write the destination_object as e.g. ``blah/foo``, in
        which case the copied file will be named ``blah/foo/baz``.
    :param destination_folder_id: The folder ID where the destination objects will be placed. It is
        an additive prefix for anything specified in destination_object.
        For example if folder ID ``xXyYzZ`` is called ``foo`` and the destination is ``bar/baz``, the file
        will end up in `foo/bar/baz`.
        This can be used to target an existing folder that is already visible to other users. The credentials
        provided must have access to this folder.
    :param move_object: When move object is True, the object is moved instead of copied to the new location.
        This is the equivalent of a mv command as opposed to a cp command.
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    template_fields: Sequence[str] = (
        "source_bucket",
        "source_object",
        "destination_object",
        "impersonation_chain",
    )
    ui_color = "#f0eee4"
    def __init__(
        self,
        *,
        source_bucket: str,
        source_object: str,
        destination_object: str | None = None,
        destination_folder_id: str | None = None,
        move_object: bool = False,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.source_bucket = source_bucket
        self.source_object = source_object
        self.destination_object = destination_object
        self.destination_folder_id = destination_folder_id
        self.move_object = move_object
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        # Hooks are created lazily in execute() so the worker (not the scheduler)
        # builds the API clients.
        self.gcs_hook: GCSHook | None = None
        self.gdrive_hook: GoogleDriveHook | None = None
    def execute(self, context: Context):
        self.gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        self.gdrive_hook = GoogleDriveHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        if WILDCARD in self.source_object:
            # Exactly one '*' is supported: the part before it is the GCS prefix,
            # the part after it is used as the list delimiter.
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                error_msg = (
                    "Only one wildcard '*' is allowed in source_object parameter. "
                    f"Found {total_wildcards} in {self.source_object}."
                )
                raise AirflowException(error_msg)
            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = self.gcs_hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)
            # TODO: After deprecating delimiter and wildcards in source objects,
            # remove the previous line and uncomment the following:
            # match_glob = f"**/*{delimiter}" if delimiter else None
            # objects = self.gcs_hook.list(self.source_bucket, prefix=prefix, match_glob=match_glob)
            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    # Replace only the first occurrence of the prefix, keeping the
                    # remainder of the object path under destination_object.
                    destination_object = source_object.replace(prefix, self.destination_object, 1)
                self._copy_single_object(source_object=source_object, destination_object=destination_object)
        else:
            self._copy_single_object(
                source_object=self.source_object, destination_object=self.destination_object
            )
    def _copy_single_object(self, source_object, destination_object):
        # Download one GCS object to a local temp file, upload it to Drive, and
        # (when move_object is set) delete the GCS original afterwards.
        self.log.info(
            "Executing copy of gs://%s/%s to gdrive://%s",
            self.source_bucket,
            source_object,
            destination_object,
        )
        with tempfile.NamedTemporaryFile() as file:
            filename = file.name
            self.gcs_hook.download(
                bucket_name=self.source_bucket, object_name=source_object, filename=filename
            )
            self.gdrive_hook.upload_file(
                local_location=filename,
                remote_location=destination_object,
                folder_id=self.destination_folder_id,
            )
        if self.move_object:
            # Source is only deleted after a successful upload.
            self.gcs_hook.delete(self.source_bucket, source_object)
| 7,886 | 43.559322 | 110 | py |
airflow | airflow-main/airflow/providers/google/suite/transfers/gcs_to_sheets.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import csv
from tempfile import NamedTemporaryFile
from typing import Any, Sequence
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.suite.hooks.sheets import GSheetsHook
class GCSToGoogleSheetsOperator(BaseOperator):
    """Upload a .csv file from Google Cloud Storage into a Google Spreadsheet.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GCSToGoogleSheets`

    :param spreadsheet_id: The Google Sheet ID to interact with.
    :param bucket_name: Name of the GCS bucket holding the source file.
    :param object_name: Path to the .csv file inside the GCS bucket.
    :param spreadsheet_range: The A1 notation of the target range in the sheet.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "spreadsheet_id",
        "bucket_name",
        "object_name",
        "spreadsheet_range",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        spreadsheet_id: str,
        bucket_name: str,
        object_name: str,
        spreadsheet_range: str = "Sheet1",
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.spreadsheet_id = spreadsheet_id
        self.spreadsheet_range = spreadsheet_range
        self.bucket_name = bucket_name
        self.object_name = object_name
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Any) -> None:
        sheet_hook = GSheetsHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        with NamedTemporaryFile("w+") as staging_file:
            # Fetch the CSV from GCS into the local staging file ...
            gcs_hook.download(
                bucket_name=self.bucket_name,
                object_name=self.object_name,
                filename=staging_file.name,
            )
            # ... parse it row-by-row and push the full table to the sheet.
            parsed_rows = [row for row in csv.reader(staging_file)]
            sheet_hook.update_values(
                spreadsheet_id=self.spreadsheet_id,
                range_=self.spreadsheet_range,
                values=parsed_rows,
            )
| 4,193 | 38.566038 | 93 | py |
airflow | airflow-main/airflow/providers/google/suite/transfers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/suite/transfers/local_to_drive.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This file contains Google Drive operators."""
from __future__ import annotations
import os
from pathlib import Path
from typing import TYPE_CHECKING, Sequence
from airflow.exceptions import AirflowFailException
from airflow.models import BaseOperator
from airflow.providers.google.suite.hooks.drive import GoogleDriveHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class LocalFilesystemToGoogleDriveOperator(BaseOperator):
    """Upload a list of files to a Google Drive folder.
    This operator uploads a list of local files to a Google Drive folder.
    The local files can optionally be deleted after upload.
    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:LocalFilesystemToGoogleDriveOperator`
    :param local_paths: Python list of local file paths
    :param drive_folder: path of the Drive folder, if *folder_id* is given,
        *drive_folder* is a sub path of the folder.
    :param gcp_conn_id: Airflow Connection ID for GCP.
    :param delete: Should the local files be deleted after upload?
    :param ignore_if_missing: If *True*, don't fail even if some files can't be
        uploaded.
    :param chunk_size: File will be uploaded in chunks of this many bytes. Only
        used when *resumable* is set to *True*. Pass in a value of -1 if the
        file is to be uploaded as a single chunk. Note that Google App Engine
        has a 5MB limit on request size, so you should never set your chunk size
        larger than 5MB, or to -1.
    :param resumable: True if this is a resumable upload. False means upload
        in a single request.
    :param delegate_to: The account to impersonate using domain-wide delegation
        of authority, if any. For this to work, the service account making the
        request must have domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using
        short-term credentials, or chained list of accounts required to get the
        access token of the last account in the list, which will be impersonated
        in the request. If set as a string, the account must grant the
        originating account the Service Account Token Creator IAM role. If set
        as a sequence, the identities from the list must grant Service Account
        Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account
    :param folder_id: The base/root folder id for each local path in the Drive
        folder.
    :param show_full_target_path: If true then it reveals full available file
        path in the logs.
    :return: Remote file ids after upload.
    """
    template_fields = (
        "local_paths",
        "drive_folder",
    )
    def __init__(
        self,
        local_paths: Sequence[Path] | Sequence[str],
        drive_folder: Path | str,
        gcp_conn_id: str = "google_cloud_default",
        delete: bool = False,
        ignore_if_missing: bool = False,
        chunk_size: int = 100 * 1024 * 1024,
        resumable: bool = False,
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        folder_id: str = "root",
        show_full_target_path: bool = True,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.local_paths = local_paths
        self.drive_folder = drive_folder
        self.gcp_conn_id = gcp_conn_id
        self.delete = delete
        self.ignore_if_missing = ignore_if_missing
        self.chunk_size = chunk_size
        self.resumable = resumable
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        self.folder_id = folder_id
        self.show_full_target_path = show_full_target_path
    def execute(self, context: Context) -> list[str]:
        # Best-effort per-file upload: failures on individual files are logged
        # and skipped; overall success is evaluated only after the loop.
        hook = GoogleDriveHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        remote_file_ids = []
        for local_path in self.local_paths:
            self.log.info("Uploading file to Google Drive: %s", local_path)
            try:
                # Target name in Drive is drive_folder joined with the local basename.
                remote_file_id = hook.upload_file(
                    local_location=str(local_path),
                    remote_location=str(Path(self.drive_folder) / Path(local_path).name),
                    chunk_size=self.chunk_size,
                    resumable=self.resumable,
                    folder_id=self.folder_id,
                    show_full_target_path=self.show_full_target_path,
                )
                remote_file_ids.append(remote_file_id)
                if self.delete:
                    os.remove(local_path)
                    self.log.info("Deleted local file: %s", local_path)
            # FileNotFoundError is a subclass of OSError, so it must be caught first
            # to get the more specific log message.
            except FileNotFoundError:
                self.log.warning("File can't be found: %s", local_path)
            except OSError:
                self.log.warning("An OSError occurred for file: %s", local_path)
        if not self.ignore_if_missing and len(remote_file_ids) < len(self.local_paths):
            # Fail-fast (no retries) when some uploads were skipped and that is not allowed.
            raise AirflowFailException("Some files couldn't be uploaded")
        return remote_file_ids
| 6,099 | 41.957746 | 89 | py |
airflow | airflow-main/airflow/providers/google/suite/operators/sheets.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any, Sequence
from airflow.models import BaseOperator
from airflow.providers.google.suite.hooks.sheets import GSheetsHook
class GoogleSheetsCreateSpreadsheetOperator(BaseOperator):
    """Create a new Google spreadsheet from a Spreadsheet resource body.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GoogleSheetsCreateSpreadsheetOperator`

    :param spreadsheet: an instance of Spreadsheet
        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets#Spreadsheet
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "spreadsheet",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        spreadsheet: dict[str, Any],
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.spreadsheet = spreadsheet
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Any) -> dict[str, Any]:
        hook = GSheetsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        created = hook.create_spreadsheet(spreadsheet=self.spreadsheet)
        # Expose id and url to downstream tasks via XCom.
        for xcom_key, response_key in (
            ("spreadsheet_id", "spreadsheetId"),
            ("spreadsheet_url", "spreadsheetUrl"),
        ):
            self.xcom_push(context, xcom_key, created[response_key])
        return created
| 3,394 | 41.974684 | 93 | py |
airflow | airflow-main/airflow/providers/google/suite/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/google/suite/hooks/calendar.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a Google Calendar API hook."""
from __future__ import annotations
from datetime import datetime
from typing import Any, Sequence
from googleapiclient.discovery import build
from airflow.exceptions import AirflowException
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleCalendarHook(GoogleBaseHook):
    """
    Interact with Google Calendar via Google Cloud connection.

    Google Calendar API reference: https://developers.google.com/calendar/api/v3/reference

    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param api_version: API Version. For example v3
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account.
    """
    def __init__(
        self,
        api_version: str,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            delegate_to=delegate_to,
            impersonation_chain=impersonation_chain,
        )
        self.gcp_conn_id = gcp_conn_id
        self.api_version = api_version
        self.delegate_to = delegate_to
        # Lazily-built Calendar service client; created on first get_conn() call.
        self._conn = None
    def get_conn(self) -> Any:
        """
        Retrieves connection to Google Calendar.
        :return: Google Calendar services object.
        """
        if not self._conn:
            http_authorized = self._authorize()
            self._conn = build("calendar", self.api_version, http=http_authorized, cache_discovery=False)
        return self._conn
    def get_events(
        self,
        calendar_id: str = "primary",
        i_cal_uid: str | None = None,
        max_attendees: int | None = None,
        max_results: int | None = None,
        order_by: str | None = None,
        private_extended_property: str | None = None,
        q: str | None = None,
        shared_extended_property: str | None = None,
        show_deleted: bool | None = False,
        show_hidden_invitation: bool | None = False,
        single_events: bool | None = False,
        sync_token: str | None = None,
        time_max: datetime | None = None,
        time_min: datetime | None = None,
        time_zone: str | None = None,
        updated_min: datetime | None = None,
    ) -> list:
        """
        Gets events from Google Calendar from a single calendar_id.
        https://developers.google.com/calendar/api/v3/reference/events/list
        :param calendar_id: The Google Calendar ID to interact with
        :param i_cal_uid: Optional. Specifies event ID in the ``iCalendar`` format in the response.
        :param max_attendees: Optional. If there are more than the specified number of attendees,
            only the participant is returned.
        :param max_results: Optional. Maximum number of events returned on one result page.
            Incomplete pages can be detected by a non-empty ``nextPageToken`` field in the response.
            By default the value is 250 events. The page size can never be larger than 2500 events
        :param order_by: Optional. Acceptable values are ``"startTime"`` or "updated"
        :param private_extended_property: Optional. Extended properties constraint specified as
            ``propertyName=value``. Matches only private properties. This parameter might be repeated
            multiple times to return events that match all given constraints.
        :param q: Optional. Free text search.
        :param shared_extended_property: Optional. Extended properties constraint specified as
            ``propertyName=value``. Matches only shared properties. This parameter might be repeated
            multiple times to return events that match all given constraints.
        :param show_deleted: Optional. False by default
        :param show_hidden_invitation: Optional. False by default
        :param single_events: Optional. False by default
        :param sync_token: Optional. Token obtained from the ``nextSyncToken`` field returned
        :param time_max: Optional. Upper bound (exclusive) for an event's start time to filter by.
            Default is no filter
        :param time_min: Optional. Lower bound (exclusive) for an event's end time to filter by.
            Default is no filter
        :param time_zone: Optional. Time zone used in response. Default is calendars time zone.
        :param updated_min: Optional. Lower bound for an event's last modification time
        :return: list of all event resources across all result pages
        """
        service = self.get_conn()
        page_token = None
        events = []
        # Follow nextPageToken until the API reports no further pages, so the
        # caller gets the complete, un-paginated event list.
        while True:
            response = (
                service.events()
                .list(
                    calendarId=calendar_id,
                    iCalUID=i_cal_uid,
                    maxAttendees=max_attendees,
                    maxResults=max_results,
                    orderBy=order_by,
                    pageToken=page_token,
                    privateExtendedProperty=private_extended_property,
                    q=q,
                    sharedExtendedProperty=shared_extended_property,
                    showDeleted=show_deleted,
                    showHiddenInvitations=show_hidden_invitation,
                    singleEvents=single_events,
                    syncToken=sync_token,
                    timeMax=time_max,
                    timeMin=time_min,
                    timeZone=time_zone,
                    updatedMin=updated_min,
                )
                .execute(num_retries=self.num_retries)
            )
            events.extend(response["items"])
            page_token = response.get("nextPageToken")
            if not page_token:
                break
        return events
    def create_event(
        self,
        event: dict[str, Any],
        calendar_id: str = "primary",
        conference_data_version: int | None = 0,
        max_attendees: int | None = None,
        send_notifications: bool | None = False,
        send_updates: str | None = "false",
        supports_attachments: bool | None = False,
    ) -> dict:
        """
        Create event on the specified calendar.
        https://developers.google.com/calendar/api/v3/reference/events/insert.
        :param event: The Event resource body to insert; must contain ``start`` and ``end``.
            https://developers.google.com/calendar/api/v3/reference/events#resource
        :param calendar_id: The Google Calendar ID to interact with
        :param conference_data_version: Optional. Version number of conference data
            supported by the API client.
        :param max_attendees: Optional. If there are more than the specified number of attendees,
            only the participant is returned.
        :param send_notifications: Optional. Default is False
        :param send_updates: Optional. Default is "false". Acceptable values as "all", "none",
            ``"externalOnly"``
        :param supports_attachments: Optional. Whether the API client supports event attachments.
        :return: the created event resource as returned by the API
        """
        # Fail early with a clear message instead of letting the API reject the request.
        if "start" not in event or "end" not in event:
            raise AirflowException(
                f"start and end must be specified in the event body while creating an event. API docs:"
                f"https://developers.google.com/calendar/api/{self.api_version}/reference/events/insert "
            )
        service = self.get_conn()
        response = (
            service.events()
            .insert(
                calendarId=calendar_id,
                conferenceDataVersion=conference_data_version,
                maxAttendees=max_attendees,
                sendNotifications=send_notifications,
                sendUpdates=send_updates,
                supportsAttachments=supports_attachments,
                body=event,
            )
            .execute(num_retries=self.num_retries)
        )
        return response
| 9,307 | 43.32381 | 105 | py |
airflow | airflow-main/airflow/providers/google/suite/hooks/sheets.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a Google Sheets API hook."""
from __future__ import annotations
from typing import Any, Sequence
from googleapiclient.discovery import build
from airflow.exceptions import AirflowException
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GSheetsHook(GoogleBaseHook):
    """
    Interact with Google Sheets via Google Cloud connection.

    Reading and writing cells in Google Sheet: https://developers.google.com/sheets/api/guides/values

    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param api_version: API Version
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account.
    """

    def __init__(
        self,
        gcp_conn_id: str = "google_cloud_default",
        api_version: str = "v4",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            delegate_to=delegate_to,
            impersonation_chain=impersonation_chain,
        )
        self.gcp_conn_id = gcp_conn_id
        self.api_version = api_version
        self.delegate_to = delegate_to
        # Lazily-built Sheets service resource; created on first get_conn() call.
        self._conn = None

    def get_conn(self) -> Any:
        """
        Retrieves connection to Google Sheets.

        :return: Google Sheets services object.
        """
        if not self._conn:
            http_authorized = self._authorize()
            self._conn = build("sheets", self.api_version, http=http_authorized, cache_discovery=False)
        return self._conn

    def get_values(
        self,
        spreadsheet_id: str,
        range_: str,
        major_dimension: str = "DIMENSION_UNSPECIFIED",
        value_render_option: str = "FORMATTED_VALUE",
        date_time_render_option: str = "SERIAL_NUMBER",
    ) -> list:
        """
        Gets values from Google Sheet from a single range.

        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/get

        :param spreadsheet_id: The Google Sheet ID to interact with
        :param range_: The A1 notation of the values to retrieve.
        :param major_dimension: Indicates which dimension an operation should apply to.
            DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
        :param value_render_option: Determines how values should be rendered in the output.
            FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
        :param date_time_render_option: Determines how dates should be rendered in the output.
            SERIAL_NUMBER or FORMATTED_STRING
        :return: An array of sheet values from the specified sheet.
        """
        service = self.get_conn()
        response = (
            service.spreadsheets()
            .values()
            .get(
                spreadsheetId=spreadsheet_id,
                range=range_,
                majorDimension=major_dimension,
                valueRenderOption=value_render_option,
                dateTimeRenderOption=date_time_render_option,
            )
            .execute(num_retries=self.num_retries)
        )
        # "values" is absent from the response when the range is empty.
        return response.get("values", [])

    def batch_get_values(
        self,
        spreadsheet_id: str,
        ranges: list,
        major_dimension: str = "DIMENSION_UNSPECIFIED",
        value_render_option: str = "FORMATTED_VALUE",
        date_time_render_option: str = "SERIAL_NUMBER",
    ) -> dict:
        """
        Gets values from Google Sheet from a list of ranges.

        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchGet

        :param spreadsheet_id: The Google Sheet ID to interact with
        :param ranges: The A1 notation of the values to retrieve.
        :param major_dimension: Indicates which dimension an operation should apply to.
            DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
        :param value_render_option: Determines how values should be rendered in the output.
            FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
        :param date_time_render_option: Determines how dates should be rendered in the output.
            SERIAL_NUMBER or FORMATTED_STRING
        :return: Google Sheets API response.
        """
        service = self.get_conn()
        response = (
            service.spreadsheets()
            .values()
            .batchGet(
                spreadsheetId=spreadsheet_id,
                ranges=ranges,
                majorDimension=major_dimension,
                valueRenderOption=value_render_option,
                dateTimeRenderOption=date_time_render_option,
            )
            .execute(num_retries=self.num_retries)
        )
        return response

    def update_values(
        self,
        spreadsheet_id: str,
        range_: str,
        values: list,
        major_dimension: str = "ROWS",
        value_input_option: str = "RAW",
        include_values_in_response: bool = False,
        value_render_option: str = "FORMATTED_VALUE",
        date_time_render_option: str = "SERIAL_NUMBER",
    ) -> dict:
        """
        Updates values from Google Sheet from a single range.

        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/update

        :param spreadsheet_id: The Google Sheet ID to interact with.
        :param range_: The A1 notation of the values to retrieve.
        :param values: Data within a range of the spreadsheet.
        :param major_dimension: Indicates which dimension an operation should apply to.
            DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
        :param value_input_option: Determines how input data should be interpreted.
            RAW or USER_ENTERED
        :param include_values_in_response: Determines if the update response should
            include the values of the cells that were updated.
        :param value_render_option: Determines how values should be rendered in the output.
            FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
        :param date_time_render_option: Determines how dates should be rendered in the output.
            SERIAL_NUMBER or FORMATTED_STRING
        :return: Google Sheets API response.
        """
        service = self.get_conn()
        body = {"range": range_, "majorDimension": major_dimension, "values": values}
        response = (
            service.spreadsheets()
            .values()
            .update(
                spreadsheetId=spreadsheet_id,
                range=range_,
                valueInputOption=value_input_option,
                includeValuesInResponse=include_values_in_response,
                responseValueRenderOption=value_render_option,
                responseDateTimeRenderOption=date_time_render_option,
                body=body,
            )
            .execute(num_retries=self.num_retries)
        )
        return response

    def batch_update_values(
        self,
        spreadsheet_id: str,
        ranges: list,
        values: list,
        major_dimension: str = "ROWS",
        value_input_option: str = "RAW",
        include_values_in_response: bool = False,
        value_render_option: str = "FORMATTED_VALUE",
        date_time_render_option: str = "SERIAL_NUMBER",
    ) -> dict:
        """
        Updates values from Google Sheet for multiple ranges.

        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchUpdate

        :param spreadsheet_id: The Google Sheet ID to interact with
        :param ranges: The A1 notation of the values to retrieve.
        :param values: Data within a range of the spreadsheet.
        :param major_dimension: Indicates which dimension an operation should apply to.
            DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
        :param value_input_option: Determines how input data should be interpreted.
            RAW or USER_ENTERED
        :param include_values_in_response: Determines if the update response should
            include the values of the cells that were updated.
        :param value_render_option: Determines how values should be rendered in the output.
            FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
        :param date_time_render_option: Determines how dates should be rendered in the output.
            SERIAL_NUMBER or FORMATTED_STRING
        :return: Google Sheets API response.
        """
        # Each range must be paired with exactly one list of values, by position.
        if len(ranges) != len(values):
            raise AirflowException(
                "'Ranges' and 'Values' must be of equal length. "
                f"'Ranges' is of length: {len(ranges)} and 'Values' is of length: {len(values)}."
            )
        service = self.get_conn()
        data = []
        for idx, range_ in enumerate(ranges):
            value_range = {"range": range_, "majorDimension": major_dimension, "values": values[idx]}
            data.append(value_range)
        body = {
            "valueInputOption": value_input_option,
            "data": data,
            "includeValuesInResponse": include_values_in_response,
            "responseValueRenderOption": value_render_option,
            "responseDateTimeRenderOption": date_time_render_option,
        }
        response = (
            service.spreadsheets()
            .values()
            .batchUpdate(spreadsheetId=spreadsheet_id, body=body)
            .execute(num_retries=self.num_retries)
        )
        return response

    def append_values(
        self,
        spreadsheet_id: str,
        range_: str,
        values: list,
        major_dimension: str = "ROWS",
        value_input_option: str = "RAW",
        insert_data_option: str = "OVERWRITE",
        include_values_in_response: bool = False,
        value_render_option: str = "FORMATTED_VALUE",
        date_time_render_option: str = "SERIAL_NUMBER",
    ) -> dict:
        """
        Append values from Google Sheet from a single range.

        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/append

        :param spreadsheet_id: The Google Sheet ID to interact with
        :param range_: The A1 notation of the values to retrieve.
        :param values: Data within a range of the spreadsheet.
        :param major_dimension: Indicates which dimension an operation should apply to.
            DIMENSION_UNSPECIFIED, ROWS, or COLUMNS
        :param value_input_option: Determines how input data should be interpreted.
            RAW or USER_ENTERED
        :param insert_data_option: Determines how existing data is changed when new data is input.
            OVERWRITE or INSERT_ROWS
        :param include_values_in_response: Determines if the update response should
            include the values of the cells that were updated.
        :param value_render_option: Determines how values should be rendered in the output.
            FORMATTED_VALUE, UNFORMATTED_VALUE, or FORMULA
        :param date_time_render_option: Determines how dates should be rendered in the output.
            SERIAL_NUMBER or FORMATTED_STRING
        :return: Google Sheets API response.
        """
        service = self.get_conn()
        body = {"range": range_, "majorDimension": major_dimension, "values": values}
        response = (
            service.spreadsheets()
            .values()
            .append(
                spreadsheetId=spreadsheet_id,
                range=range_,
                valueInputOption=value_input_option,
                insertDataOption=insert_data_option,
                includeValuesInResponse=include_values_in_response,
                responseValueRenderOption=value_render_option,
                responseDateTimeRenderOption=date_time_render_option,
                body=body,
            )
            .execute(num_retries=self.num_retries)
        )
        return response

    def clear(self, spreadsheet_id: str, range_: str) -> dict:
        """
        Clear values from Google Sheet from a single range.

        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear

        :param spreadsheet_id: The Google Sheet ID to interact with
        :param range_: The A1 notation of the values to retrieve.
        :return: Google Sheets API response.
        """
        service = self.get_conn()
        response = (
            service.spreadsheets()
            .values()
            .clear(spreadsheetId=spreadsheet_id, range=range_)
            .execute(num_retries=self.num_retries)
        )
        return response

    def batch_clear(self, spreadsheet_id: str, ranges: list) -> dict:
        """
        Clear values from Google Sheet from a list of ranges.

        https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchClear

        :param spreadsheet_id: The Google Sheet ID to interact with
        :param ranges: The A1 notation of the values to retrieve.
        :return: Google Sheets API response.
        """
        service = self.get_conn()
        body = {"ranges": ranges}
        response = (
            service.spreadsheets()
            .values()
            .batchClear(spreadsheetId=spreadsheet_id, body=body)
            .execute(num_retries=self.num_retries)
        )
        return response

    def get_spreadsheet(self, spreadsheet_id: str):
        """
        Retrieves spreadsheet matching the given id.

        :param spreadsheet_id: The spreadsheet id.
        :return: An spreadsheet that matches the sheet filter.
        """
        response = (
            self.get_conn()
            .spreadsheets()
            .get(spreadsheetId=spreadsheet_id)
            .execute(num_retries=self.num_retries)
        )
        return response

    def get_sheet_titles(self, spreadsheet_id: str, sheet_filter: list[str] | None = None):
        """
        Retrieves the sheet titles from a spreadsheet matching the given id and sheet filter.

        :param spreadsheet_id: The spreadsheet id.
        :param sheet_filter: List of sheet title to retrieve from sheet.
        :return: An list of sheet titles from the specified sheet that match
            the sheet filter.
        """
        response = self.get_spreadsheet(spreadsheet_id=spreadsheet_id)
        if sheet_filter:
            titles = [
                sh["properties"]["title"]
                for sh in response["sheets"]
                if sh["properties"]["title"] in sheet_filter
            ]
        else:
            titles = [sh["properties"]["title"] for sh in response["sheets"]]
        return titles

    def create_spreadsheet(self, spreadsheet: dict[str, Any]) -> dict[str, Any]:
        """
        Creates a spreadsheet, returning the newly created spreadsheet.

        :param spreadsheet: an instance of Spreadsheet
            https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets#Spreadsheet
        :return: An spreadsheet object.
        """
        self.log.info("Creating spreadsheet: %s", spreadsheet["properties"]["title"])
        response = (
            self.get_conn().spreadsheets().create(body=spreadsheet).execute(num_retries=self.num_retries)
        )
        self.log.info("Spreadsheet: %s created", spreadsheet["properties"]["title"])
        return response
| 16,757 | 39.18705 | 105 | py |
airflow | airflow-main/airflow/providers/google/suite/hooks/drive.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Google Drive service."""
from __future__ import annotations
from typing import IO, Any, Sequence
from googleapiclient.discovery import Resource, build
from googleapiclient.errors import Error as GoogleApiClientError
from googleapiclient.http import HttpRequest, MediaFileUpload
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GoogleDriveHook(GoogleBaseHook):
    """
    Hook for the Google Drive APIs.

    :param api_version: API version used (for example v3).
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account.
    """

    # Lazily-built Drive service resource; created on first get_conn() call.
    _conn: Resource | None = None

    def __init__(
        self,
        api_version: str = "v3",
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            delegate_to=delegate_to,
            impersonation_chain=impersonation_chain,
        )
        self.api_version = api_version

    def get_conn(self) -> Any:
        """
        Retrieves the connection to Google Drive.

        :return: Google Drive services object.
        """
        if not self._conn:
            http_authorized = self._authorize()
            self._conn = build("drive", self.api_version, http=http_authorized, cache_discovery=False)
        return self._conn

    def _ensure_folders_exists(self, path: str, folder_id: str) -> str:
        """Walk ``path`` under ``folder_id``, creating any missing folders; return the last folder's id."""
        service = self.get_conn()
        current_parent = folder_id
        folders = path.split("/")
        depth = 0
        # First tries to enter directories
        for current_folder in folders:
            self.log.debug("Looking for %s directory with %s parent", current_folder, current_parent)
            conditions = [
                "trashed=false",
                "mimeType='application/vnd.google-apps.folder'",
                f"name='{current_folder}'",
                f"'{current_parent}' in parents",
            ]
            result = (
                service.files()
                .list(
                    q=" and ".join(conditions),
                    spaces="drive",
                    fields="files(id, name)",
                    includeItemsFromAllDrives=True,
                    supportsAllDrives=True,
                )
                .execute(num_retries=self.num_retries)
            )
            files = result.get("files", [])
            if not files:
                self.log.info("Not found %s directory", current_folder)
                # If the directory does not exist, break loops
                break
            depth += 1
            current_parent = files[0].get("id")
        # Check if there are directories to process
        if depth != len(folders):
            # Create missing directories
            for current_folder in folders[depth:]:
                file_metadata = {
                    "name": current_folder,
                    "mimeType": "application/vnd.google-apps.folder",
                    "parents": [current_parent],
                }
                file = (
                    service.files()
                    .create(
                        body=file_metadata,
                        fields="id",
                        supportsAllDrives=True,
                    )
                    .execute(num_retries=self.num_retries)
                )
                self.log.info("Created %s directory", current_folder)
                current_parent = file.get("id")
        # Return the ID of the last directory
        return current_parent

    def get_media_request(self, file_id: str) -> HttpRequest:
        """
        Returns a get_media http request to a Google Drive object.

        :param file_id: The Google Drive file id
        :return: request
        """
        service = self.get_conn()
        request = service.files().get_media(fileId=file_id)
        return request

    def exists(
        self, folder_id: str, file_name: str, drive_id: str | None = None, *, include_trashed: bool = True
    ) -> bool:
        """
        Checks to see if a file exists within a Google Drive folder.

        :param folder_id: The id of the Google Drive folder in which the file resides
        :param file_name: The name of a file in Google Drive
        :param drive_id: Optional. The id of the shared Google Drive in which the file resides.
        :param include_trashed: Whether to include objects in trash or not, default True as in Google API.
        :return: True if the file exists, False otherwise
        """
        return bool(
            self.get_file_id(
                folder_id=folder_id, file_name=file_name, include_trashed=include_trashed, drive_id=drive_id
            )
        )

    def _get_file_info(self, file_id: str):
        """
        Returns Google API file_info object containing id, name, parents in the response.

        https://developers.google.com/drive/api/v3/reference/files/get

        :param file_id: id as string representation of interested file
        :return: file
        """
        file_info = (
            self.get_conn()
            .files()
            .get(
                fileId=file_id,
                fields="id,name,parents",
                supportsAllDrives=True,
            )
            # Use the hook-wide retry setting for consistency with all other API calls.
            .execute(num_retries=self.num_retries)
        )
        return file_info

    def _resolve_file_path(self, file_id: str) -> str:
        """
        Returns the full Google Drive path for given file_id.

        :param file_id: The id of a file in Google Drive
        :return: Google Drive full path for a file
        """
        has_reached_root = False
        current_file_id = file_id
        path: str = ""
        while not has_reached_root:
            # current_file_id can be file or directory id, Google API treats them the same way.
            file_info = self._get_file_info(current_file_id)
            if current_file_id == file_id:
                path = f'{file_info["name"]}'
            else:
                path = f'{file_info["name"]}/{path}'
            # Google API returns parents array if there is at least one object inside
            if "parents" in file_info and len(file_info["parents"]) == 1:
                # https://developers.google.com/drive/api/guides/ref-single-parent
                current_file_id = file_info["parents"][0]
            else:
                has_reached_root = True
        return path

    def get_file_id(
        self, folder_id: str, file_name: str, drive_id: str | None = None, *, include_trashed: bool = True
    ) -> dict:
        """
        Returns the file id of a Google Drive file.

        :param folder_id: The id of the Google Drive folder in which the file resides
        :param file_name: The name of a file in Google Drive
        :param drive_id: Optional. The id of the shared Google Drive in which the file resides.
        :param include_trashed: Whether to include objects in trash or not, default True as in Google API.
        :return: Google Drive file id if the file exists, otherwise None
        """
        query = f"name = '{file_name}'"
        if folder_id:
            query += f" and parents in '{folder_id}'"
        if not include_trashed:
            query += " and trashed=false"
        service = self.get_conn()
        if drive_id:
            files = (
                service.files()
                .list(
                    q=query,
                    spaces="drive",
                    fields="files(id, mimeType)",
                    orderBy="modifiedTime desc",
                    driveId=drive_id,
                    includeItemsFromAllDrives=True,
                    supportsAllDrives=True,
                    corpora="drive",
                )
                .execute(num_retries=self.num_retries)
            )
        else:
            files = (
                service.files()
                .list(q=query, spaces="drive", fields="files(id, mimeType)", orderBy="modifiedTime desc")
                .execute(num_retries=self.num_retries)
            )
        file_metadata = {}
        if files["files"]:
            # Most recently modified match wins (orderBy="modifiedTime desc").
            file_metadata = {"id": files["files"][0]["id"], "mime_type": files["files"][0]["mimeType"]}
        return file_metadata

    def upload_file(
        self,
        local_location: str,
        remote_location: str,
        chunk_size: int = 100 * 1024 * 1024,
        resumable: bool = False,
        folder_id: str = "root",
        show_full_target_path: bool = True,
    ) -> str:
        """
        Uploads a file that is available locally to a Google Drive service.

        :param local_location: The path where the file is available.
        :param remote_location: The path where the file will be send
        :param chunk_size: File will be uploaded in chunks of this many bytes. Only
            used if resumable=True. Pass in a value of -1 if the file is to be
            uploaded as a single chunk. Note that Google App Engine has a 5MB limit
            on request size, so you should never set your chunk size larger than 5MB,
            or to -1.
        :param resumable: True if this is a resumable upload. False means upload
            in a single request.
        :param folder_id: The base/root folder id for remote_location (part of the drive URL of a folder).
        :param show_full_target_path: If true then it reveals full available file path in the logs.
        :return: File ID
        """
        service = self.get_conn()
        directory_path, _, file_name = remote_location.rpartition("/")
        if directory_path:
            parent = self._ensure_folders_exists(path=directory_path, folder_id=folder_id)
        else:
            parent = folder_id
        file_metadata = {"name": file_name, "parents": [parent]}
        media = MediaFileUpload(local_location, chunksize=chunk_size, resumable=resumable)
        file = (
            service.files()
            .create(body=file_metadata, media_body=media, fields="id", supportsAllDrives=True)
            .execute(num_retries=self.num_retries)
        )
        file_id = file.get("id")
        upload_location = remote_location
        if folder_id != "root":
            try:
                upload_location = self._resolve_file_path(folder_id)
            except GoogleApiClientError as e:
                # Path resolution is best-effort (logging only); failure must not fail the upload.
                # The "%s" placeholder is required so the lazy logger actually renders the exception.
                self.log.warning("A problem has been encountered when trying to resolve file path: %s", e)
        if show_full_target_path:
            self.log.info("File %s uploaded to gdrive://%s.", local_location, upload_location)
        else:
            self.log.info("File %s has been uploaded successfully to gdrive", local_location)
        return file_id

    def download_file(self, file_id: str, file_handle: IO, chunk_size: int = 100 * 1024 * 1024):
        """
        Download a file from Google Drive.

        :param file_id: the id of the file
        :param file_handle: file handle used to write the content to
        :param chunk_size: File will be downloaded in chunks of this many bytes.
        """
        request = self.get_media_request(file_id=file_id)
        self.download_content_from_request(file_handle=file_handle, request=request, chunk_size=chunk_size)
| 12,910 | 39.096273 | 108 | py |
airflow | airflow-main/airflow/providers/google/suite/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/google/suite/sensors/drive.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Drive sensors."""
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.providers.google.suite.hooks.drive import GoogleDriveHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class GoogleDriveFileExistenceSensor(BaseSensorOperator):
    """
    Checks for the existence of a file in Google Cloud Storage.

    :param folder_id: The Google drive folder where the file is.
    :param file_name: The name of the file to check in Google Drive
    :param drive_id: Optional. The id of the shared Google Drive in which the file resides.
    :param gcp_conn_id: The connection ID to use when
        connecting to Google Cloud Storage.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("folder_id", "file_name", "drive_id", "impersonation_chain")
    ui_color = "#f0eee4"

    def __init__(
        self,
        *,
        folder_id: str,
        file_name: str,
        drive_id: str | None = None,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: str | None = None,
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Target file coordinates on Google Drive.
        self.folder_id = folder_id
        self.file_name = file_name
        self.drive_id = drive_id
        # Credentials / connection configuration.
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def poke(self, context: Context) -> bool:
        """Return True once the target file is present in the configured Drive folder."""
        self.log.info("Sensor is checking for the file %s in the folder %s", self.file_name, self.folder_id)
        drive_hook = GoogleDriveHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        found = drive_hook.exists(
            folder_id=self.folder_id,
            file_name=self.file_name,
            drive_id=self.drive_id,
        )
        return found
| 3,616 | 40.102273 | 108 | py |
airflow | airflow-main/airflow/providers/google/suite/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/firebase/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/firebase/operators/firestore.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.firebase.hooks.firestore import CloudFirestoreHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class CloudFirestoreExportDatabaseOperator(BaseOperator):
    """
    Export documents from Google Cloud Firestore to another storage system, such as Google Cloud Storage.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudFirestoreExportDatabaseOperator`

    :param database_id: The Database ID.
    :param body: The request body.
        See:
        https://firebase.google.com/docs/firestore/reference/rest/v1beta1/projects.databases/exportDocuments
    :param project_id: ID of the Google Cloud project if None then
        default project_id is used.
    :param gcp_conn_id: The connection ID to use to connect to Google Cloud.
    :param api_version: API version used (for example v1 or v1beta1).
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("body", "gcp_conn_id", "api_version", "impersonation_chain")

    def __init__(
        self,
        *,
        body: dict,
        database_id: str = "(default)",
        project_id: str | None = None,
        gcp_conn_id: str = "google_cloud_default",
        api_version: str = "v1",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.database_id = database_id
        self.body = body
        self.project_id = project_id
        self.gcp_conn_id = gcp_conn_id
        self.api_version = api_version
        # Fail fast at construction time if mandatory arguments are missing.
        self._validate_inputs()
        self.impersonation_chain = impersonation_chain

    def _validate_inputs(self) -> None:
        """Raise if the request body was not provided."""
        if not self.body:
            raise AirflowException("The required parameter 'body' is missing")

    def execute(self, context: Context):
        """Trigger the Firestore export and return the API response."""
        firestore_hook = CloudFirestoreHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        result = firestore_hook.export_documents(
            database_id=self.database_id,
            body=self.body,
            project_id=self.project_id,
        )
        return result
| 3,784 | 39.698925 | 110 | py |
airflow | airflow-main/airflow/providers/google/firebase/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/firebase/hooks/firestore.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Google Cloud Firestore service."""
from __future__ import annotations
import time
from typing import Sequence
from googleapiclient.discovery import build, build_from_document
from airflow.exceptions import AirflowException
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
# Time to sleep between active checks of the operation results
# (polling interval, in seconds, used by CloudFirestoreHook._wait_for_operation_to_complete).
TIME_TO_SLEEP_IN_SECONDS = 5
class CloudFirestoreHook(GoogleBaseHook):
    """
    Hook for the Google Firestore APIs.

    All the methods in the hook where project_id is used must be called with
    keyword arguments rather than positional.

    :param api_version: API version used (for example v1 or v1beta1).
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account.
    """

    # Cached discovery client; built lazily on first get_conn() call.
    _conn: build | None = None

    def __init__(
        self,
        api_version: str = "v1",
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
    ) -> None:
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            impersonation_chain=impersonation_chain,
        )
        self.api_version = api_version

    def get_conn(self):
        """
        Retrieve the connection to Cloud Firestore.

        :return: Google Cloud Firestore services object.
        """
        if not self._conn:
            http_authorized = self._authorize()
            # We cannot use an Authorized Client to retrieve discovery document due to an error in the API.
            # When the authorized customer will send a request to the address below
            # https://www.googleapis.com/discovery/v1/apis/firestore/v1/rest
            # then it will get the message below:
            # > Request contains an invalid argument.
            # At the same time, the Non-Authorized Client has no problems.
            non_authorized_conn = build("firestore", self.api_version, cache_discovery=False)
            self._conn = build_from_document(non_authorized_conn._rootDesc, http=http_authorized)
        return self._conn

    @GoogleBaseHook.fallback_to_default_project_id
    def export_documents(
        self, body: dict, database_id: str = "(default)", project_id: str | None = None
    ) -> dict | None:
        """
        Start an export with the specified configuration and wait for it to complete.

        :param database_id: The Database ID.
        :param body: The request body.
            See:
            https://firebase.google.com/docs/firestore/reference/rest/v1beta1/projects.databases/exportDocuments
        :param project_id: Optional, Google Cloud Project project_id where the database belongs.
            If set to None or missing, the default project_id from the Google Cloud connection is used.
        :return: The final response of the long-running export operation (or None if the
            operation finished without a response body).
        """
        service = self.get_conn()
        name = f"projects/{project_id}/databases/{database_id}"
        operation = (
            service.projects()
            .databases()
            .exportDocuments(name=name, body=body)
            .execute(num_retries=self.num_retries)
        )
        # Return the operation result instead of discarding it: the operator calling this
        # method returns this value from execute() so it can be pushed to XCom.
        return self._wait_for_operation_to_complete(operation["name"])

    def _wait_for_operation_to_complete(self, operation_name: str) -> dict | None:
        """
        Wait for the named operation to complete - checks status of the asynchronous call.

        :param operation_name: The name of the operation.
        :return: The response returned by the operation.
        :exception: AirflowException in case error is returned.
        """
        service = self.get_conn()
        while True:
            operation_response = (
                service.projects()
                .databases()
                .operations()
                .get(name=operation_name)
                .execute(num_retries=self.num_retries)
            )
            if operation_response.get("done"):
                response = operation_response.get("response")
                error = operation_response.get("error")
                # Note, according to documentation always either response or error is
                # set when "done" == True
                if error:
                    raise AirflowException(str(error))
                return response
            time.sleep(TIME_TO_SLEEP_IN_SECONDS)
| 5,643 | 40.19708 | 112 | py |
airflow | airflow-main/airflow/providers/google/firebase/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/cloud/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/cloud/secrets/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/google/cloud/secrets/secret_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Objects relating to sourcing connections from Google Cloud Secrets Manager."""
from __future__ import annotations
import logging
import re
import warnings
from google.auth.exceptions import DefaultCredentialsError
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient
from airflow.providers.google.cloud.utils.credentials_provider import get_credentials_and_project_id
from airflow.secrets import BaseSecretsBackend
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.version import version as airflow_version
log = logging.getLogger(__name__)
# Secret Manager secret ids may only contain ASCII letters, digits, hyphens and underscores.
SECRET_ID_PATTERN = r"^[a-zA-Z0-9-_]*$"
def _parse_version(val):
val = re.sub(r"(\d+\.\d+\.\d+).*", lambda x: x.group(1), val)
return tuple(int(x) for x in val.split("."))
class CloudSecretManagerBackend(BaseSecretsBackend, LoggingMixin):
    """
    Retrieves Connection object from Google Cloud Secrets Manager.
    Configurable via ``airflow.cfg`` as follows:
    .. code-block:: ini
        [secrets]
        backend = airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend
        backend_kwargs = {"connections_prefix": "airflow-connections", "sep": "-"}
    For example, if the Secrets Manager secret id is ``airflow-connections-smtp_default``, this would be
    accessible if you provide ``{"connections_prefix": "airflow-connections", "sep": "-"}`` and request
    conn_id ``smtp_default``.
    If the Secrets Manager secret id is ``airflow-variables-hello``, this would be
    accessible if you provide ``{"variables_prefix": "airflow-variables", "sep": "-"}`` and request
    Variable Key ``hello``.
    The full secret id should follow the pattern "[a-zA-Z0-9-_]".
    :param connections_prefix: Specifies the prefix of the secret to read to get Connections.
        If set to None (null), requests for connections will not be sent to GCP Secrets Manager
    :param variables_prefix: Specifies the prefix of the secret to read to get Variables.
        If set to None (null), requests for variables will not be sent to GCP Secrets Manager
    :param config_prefix: Specifies the prefix of the secret to read to get Airflow Configurations
        containing secrets.
        If set to None (null), requests for configurations will not be sent to GCP Secrets Manager
    :param gcp_key_path: Path to Google Cloud Service Account key file (JSON). Mutually exclusive with
        gcp_keyfile_dict. use default credentials in the current environment if not provided.
    :param gcp_keyfile_dict: Dictionary of keyfile parameters. Mutually exclusive with gcp_key_path.
    :param gcp_credential_config_file: File path to or content of a GCP credential configuration file.
    :param gcp_scopes: Comma-separated string containing OAuth2 scopes
    :param project_id: Project ID to read the secrets from. If not passed, the project ID from credentials
        will be used.
    :param sep: Separator used to concatenate connections_prefix and conn_id. Default: "-"
    """
    def __init__(
        self,
        connections_prefix: str = "airflow-connections",
        variables_prefix: str = "airflow-variables",
        config_prefix: str = "airflow-config",
        gcp_keyfile_dict: dict | None = None,
        gcp_key_path: str | None = None,
        gcp_credential_config_file: dict[str, str] | str | None = None,
        gcp_scopes: str | None = None,
        project_id: str | None = None,
        sep: str = "-",
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.connections_prefix = connections_prefix
        self.variables_prefix = variables_prefix
        self.config_prefix = config_prefix
        self.sep = sep
        # Validation is only performed when connection lookups are enabled; the
        # combined "<connections_prefix><sep>" string must be a legal secret-id prefix.
        if connections_prefix is not None:
            if not self._is_valid_prefix_and_sep():
                raise AirflowException(
                    "`connections_prefix`, `variables_prefix` and `sep` should "
                    f"follows that pattern {SECRET_ID_PATTERN}"
                )
        # Resolve credentials eagerly so that misconfiguration surfaces at backend init.
        try:
            self.credentials, self.project_id = get_credentials_and_project_id(
                keyfile_dict=gcp_keyfile_dict,
                key_path=gcp_key_path,
                credential_config_file=gcp_credential_config_file,
                scopes=gcp_scopes,
            )
        except (DefaultCredentialsError, FileNotFoundError):
            # NOTE(review): on failure self.credentials / self.project_id remain unset,
            # so a later access through `client` raises AttributeError — confirm intended.
            log.exception(
                "Unable to load credentials for GCP Secret Manager. "
                "Make sure that the keyfile path or dictionary, credential configuration file, "
                "or GOOGLE_APPLICATION_CREDENTIALS environment variable is correct and properly configured."
            )
        # In case project id provided
        if project_id:
            self.project_id = project_id
    @property
    def client(self) -> _SecretManagerClient:
        """
        Property returning secret client.
        :return: Secrets client
        """
        return _SecretManagerClient(credentials=self.credentials)
    def _is_valid_prefix_and_sep(self) -> bool:
        """Check that ``connections_prefix + sep`` forms a valid Secret Manager secret-id prefix."""
        prefix = self.connections_prefix + self.sep
        return _SecretManagerClient.is_valid_secret_name(prefix)
    def get_conn_value(self, conn_id: str) -> str | None:
        """
        Get serialized representation of Connection.
        :param conn_id: connection id
        """
        if self.connections_prefix is None:
            return None
        return self._get_secret(self.connections_prefix, conn_id)
    def get_conn_uri(self, conn_id: str) -> str | None:
        """
        Return URI representation of Connection conn_id.
        As of Airflow version 2.3.0 this method is deprecated.
        :param conn_id: the connection id
        :return: deserialized Connection
        """
        # Only warn on Airflow >= 2.3, where get_conn_value is the supported API.
        if _parse_version(airflow_version) >= (2, 3):
            warnings.warn(
                f"Method `{self.__class__.__name__}.get_conn_uri` is deprecated and will be removed "
                "in a future release. Please use method `get_conn_value` instead.",
                AirflowProviderDeprecationWarning,
                stacklevel=2,
            )
        return self.get_conn_value(conn_id)
    def get_variable(self, key: str) -> str | None:
        """
        Get Airflow Variable from Environment Variable.
        :param key: Variable Key
        :return: Variable Value
        """
        if self.variables_prefix is None:
            return None
        return self._get_secret(self.variables_prefix, key)
    def get_config(self, key: str) -> str | None:
        """
        Get Airflow Configuration.
        :param key: Configuration Option Key
        :return: Configuration Option Value
        """
        if self.config_prefix is None:
            return None
        return self._get_secret(self.config_prefix, key)
    def _get_secret(self, path_prefix: str, secret_id: str) -> str | None:
        """
        Get secret value from the SecretManager based on prefix.
        :param path_prefix: Prefix for the Path to get Secret
        :param secret_id: Secret Key
        """
        # Full secret id is "<prefix><sep><key>", e.g. "airflow-connections-smtp_default".
        secret_id = self.build_path(path_prefix, secret_id, self.sep)
        return self.client.get_secret(secret_id=secret_id, project_id=self.project_id)
| 8,133 | 39.874372 | 108 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/life_sciences.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# Life Sciences console root; this is an absolute URL, so BaseGoogleLink returns
# it without prepending the generic console root.
BASE_LINK = "https://console.cloud.google.com/lifesciences"
# Pipelines list page for a project.
LIFESCIENCES_LIST_LINK = BASE_LINK + "/pipelines?project={project_id}"
class LifeSciencesLink(BaseGoogleLink):
    """Link to the Life Sciences pipelines list in the Google Cloud console."""

    name = "Life Sciences"
    key = "lifesciences_key"
    format_str = LIFESCIENCES_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(
            context=context,
            key=LifeSciencesLink.key,
            value={"project_id": project_id},
        )
| 1,629 | 31.6 | 70 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/base.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, ClassVar
from airflow.models import BaseOperatorLink, XCom
if TYPE_CHECKING:
from airflow.models import BaseOperator
from airflow.models.taskinstancekey import TaskInstanceKey
BASE_LINK = "https://console.cloud.google.com"
class BaseGoogleLink(BaseOperatorLink):
    """Base class for all Google links.

    :meta private:
    """

    name: ClassVar[str]
    key: ClassVar[str]
    format_str: ClassVar[str]

    def get_link(
        self,
        operator: BaseOperator,
        *,
        ti_key: TaskInstanceKey,
    ) -> str:
        """Render the console URL from the parameters pushed to XCom, or "" when absent."""
        link_params = XCom.get_value(key=self.key, ti_key=ti_key)
        if not link_params:
            return ""
        formatted = self.format_str.format(**link_params)
        # Absolute templates are returned as-is; relative ones hang off the console root.
        return formatted if self.format_str.startswith("http") else BASE_LINK + formatted
| 1,660 | 29.759259 | 62 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/cloud_memorystore.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Cloud Memorystore links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# All templates below are relative paths; BaseGoogleLink prepends the console root.
BASE_LINK = "/memorystore"
# Details page of a single Memcached instance.
MEMCACHED_LINK = (
    BASE_LINK + "/memcached/locations/{location_id}/instances/{instance_id}/details?project={project_id}"
)
# List of Memcached instances in a project.
MEMCACHED_LIST_LINK = BASE_LINK + "/memcached/instances?project={project_id}"
# Overview page of a single Redis instance.
REDIS_LINK = (
    BASE_LINK + "/redis/locations/{location_id}/instances/{instance_id}/details/overview?project={project_id}"
)
# List of Redis instances in a project.
REDIS_LIST_LINK = BASE_LINK + "/redis/instances?project={project_id}"
class MemcachedInstanceDetailsLink(BaseGoogleLink):
    """Link to the details page of a Memorystore Memcached instance."""

    name = "Memorystore Memcached Instance"
    key = "memcached_instance"
    format_str = MEMCACHED_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        instance_id: str,
        location_id: str,
        project_id: str | None,
    ):
        """Push the identifiers needed to render the instance link into XCom."""
        link_params = {
            "instance_id": instance_id,
            "location_id": location_id,
            "project_id": project_id,
        }
        task_instance.xcom_push(context, key=MemcachedInstanceDetailsLink.key, value=link_params)
class MemcachedInstanceListLink(BaseGoogleLink):
    """Link to the list of Memorystore Memcached instances in a project."""

    name = "Memorystore Memcached List of Instances"
    key = "memcached_instances"
    format_str = MEMCACHED_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance: BaseOperator, project_id: str | None):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(context, key=MemcachedInstanceListLink.key, value={"project_id": project_id})
class RedisInstanceDetailsLink(BaseGoogleLink):
    """Link to the overview page of a Memorystore Redis instance."""

    name = "Memorystore Redis Instance"
    key = "redis_instance"
    format_str = REDIS_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        instance_id: str,
        location_id: str,
        project_id: str | None,
    ):
        """Push the identifiers needed to render the instance link into XCom."""
        link_params = {
            "instance_id": instance_id,
            "location_id": location_id,
            "project_id": project_id,
        }
        task_instance.xcom_push(context, key=RedisInstanceDetailsLink.key, value=link_params)
class RedisInstanceListLink(BaseGoogleLink):
    """Link to the list of Memorystore Redis instances in a project."""

    name = "Memorystore Redis List of Instances"
    key = "redis_instances"
    format_str = REDIS_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance: BaseOperator, project_id: str | None):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(context, key=RedisInstanceListLink.key, value={"project_id": project_id})
| 3,866 | 30.696721 | 110 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/data_loss_prevention.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# Console URL templates for the DLP link classes below. These are absolute URLs,
# so BaseGoogleLink returns them without prepending the generic console root.
BASE_LINK = "https://console.cloud.google.com"
DLP_BASE_LINK = BASE_LINK + "/security/dlp"
# De-identify template list / details pages.
DLP_DEIDENTIFY_TEMPLATES_LIST_LINK = (
    DLP_BASE_LINK + "/landing/configuration/templates/deidentify?project={project_id}"
)
DLP_DEIDENTIFY_TEMPLATE_DETAILS_LINK = (
    DLP_BASE_LINK
    + "/projects/{project_id}/locations/global/deidentifyTemplates/{template_name}?project={project_id}"
)
# Job trigger list / details pages.
DLP_JOB_TRIGGER_LIST_LINK = DLP_BASE_LINK + "/landing/inspection/triggers?project={project_id}"
DLP_JOB_TRIGGER_DETAILS_LINK = (
    DLP_BASE_LINK + "/projects/{project_id}/locations/global/jobTriggers/{trigger_name}?project={project_id}"
)
# Inspection job list / details pages.
DLP_JOBS_LIST_LINK = DLP_BASE_LINK + "/landing/inspection/jobs?project={project_id}"
DLP_JOB_DETAILS_LINK = (
    DLP_BASE_LINK + "/projects/{project_id}/locations/global/dlpJobs/{job_name}?project={project_id}"
)
# Inspect template list / details pages.
DLP_INSPECT_TEMPLATES_LIST_LINK = (
    DLP_BASE_LINK + "/landing/configuration/templates/inspect?project={project_id}"
)
DLP_INSPECT_TEMPLATE_DETAILS_LINK = (
    DLP_BASE_LINK
    + "/projects/{project_id}/locations/global/inspectTemplates/{template_name}?project={project_id}"
)
# Stored info-type list / details pages, plus the built-in info-type catalog.
DLP_INFO_TYPES_LIST_LINK = (
    DLP_BASE_LINK + "/landing/configuration/infoTypes/stored?cloudshell=false&project={project_id}"
)
DLP_INFO_TYPE_DETAILS_LINK = (
    DLP_BASE_LINK
    + "/projects/{project_id}/locations/global/storedInfoTypes/{info_type_name}?project={project_id}"
)
DLP_POSSIBLE_INFO_TYPES_LIST_LINK = (
    DLP_BASE_LINK + "/landing/configuration/infoTypes/built-in?project={project_id}"
)
class CloudDLPDeidentifyTemplatesListLink(BaseGoogleLink):
    """Link to the DLP de-identify templates list in the Google Cloud console."""

    name = "Cloud DLP Deidentify Templates List"
    key = "cloud_dlp_deidentify_templates_list_key"
    format_str = DLP_DEIDENTIFY_TEMPLATES_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(
            context=context,
            key=CloudDLPDeidentifyTemplatesListLink.key,
            value={"project_id": project_id},
        )
class CloudDLPDeidentifyTemplateDetailsLink(BaseGoogleLink):
    """Link to the details page of a DLP de-identify template."""

    name = "Cloud DLP Deidentify Template Details"
    key = "cloud_dlp_deidentify_template_details_key"
    format_str = DLP_DEIDENTIFY_TEMPLATE_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, template_name: str):
        """Push the project id and template name needed to render this link into XCom."""
        link_params = {"project_id": project_id, "template_name": template_name}
        task_instance.xcom_push(
            context=context,
            key=CloudDLPDeidentifyTemplateDetailsLink.key,
            value=link_params,
        )
class CloudDLPJobTriggersListLink(BaseGoogleLink):
    """Link to the DLP job triggers list in the Google Cloud console."""

    name = "Cloud DLP Job Triggers List"
    key = "cloud_dlp_job_triggers_list_key"
    format_str = DLP_JOB_TRIGGER_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(
            context=context,
            key=CloudDLPJobTriggersListLink.key,
            value={"project_id": project_id},
        )
class CloudDLPJobTriggerDetailsLink(BaseGoogleLink):
    """Link to the details page of a DLP job trigger."""

    name = "Cloud DLP Job Triggers Details"
    key = "cloud_dlp_job_trigger_details_key"
    format_str = DLP_JOB_TRIGGER_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, trigger_name: str):
        """Push the project id and trigger name needed to render this link into XCom."""
        link_params = {"project_id": project_id, "trigger_name": trigger_name}
        task_instance.xcom_push(
            context=context,
            key=CloudDLPJobTriggerDetailsLink.key,
            value=link_params,
        )
class CloudDLPJobsListLink(BaseGoogleLink):
    """Link to the DLP inspection jobs list in the Google Cloud console."""

    name = "Cloud DLP Jobs List"
    key = "cloud_dlp_jobs_list_key"
    format_str = DLP_JOBS_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(
            context=context,
            key=CloudDLPJobsListLink.key,
            value={"project_id": project_id},
        )
class CloudDLPJobDetailsLink(BaseGoogleLink):
    """Link to the details page of a single DLP job."""

    name = "Cloud DLP Job Details"
    key = "cloud_dlp_job_details_key"
    format_str = DLP_JOB_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, job_name: str):
        """Push the project id and job name needed to render this link into XCom."""
        link_params = {"project_id": project_id, "job_name": job_name}
        task_instance.xcom_push(
            context=context,
            key=CloudDLPJobDetailsLink.key,
            value=link_params,
        )
class CloudDLPInspectTemplatesListLink(BaseGoogleLink):
    """Link to the DLP inspect templates list in the Google Cloud console."""

    name = "Cloud DLP Inspect Templates List"
    key = "cloud_dlp_inspect_templates_list_key"
    format_str = DLP_INSPECT_TEMPLATES_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(
            context=context,
            key=CloudDLPInspectTemplatesListLink.key,
            value={"project_id": project_id},
        )
class CloudDLPInspectTemplateDetailsLink(BaseGoogleLink):
    """Link to the details page of a DLP inspect template."""

    name = "Cloud DLP Inspect Template Details"
    key = "cloud_dlp_inspect_template_details_key"
    format_str = DLP_INSPECT_TEMPLATE_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, template_name: str):
        """Push the project id and template name needed to render this link into XCom."""
        link_params = {"project_id": project_id, "template_name": template_name}
        task_instance.xcom_push(
            context=context,
            key=CloudDLPInspectTemplateDetailsLink.key,
            value=link_params,
        )
class CloudDLPInfoTypesListLink(BaseGoogleLink):
    """Link to the DLP stored info types list in the Google Cloud console."""

    name = "Cloud DLP Info Types List"
    key = "cloud_dlp_info_types_list_key"
    format_str = DLP_INFO_TYPES_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(
            context=context,
            key=CloudDLPInfoTypesListLink.key,
            value={"project_id": project_id},
        )
class CloudDLPInfoTypeDetailsLink(BaseGoogleLink):
    """Link to the details page of a DLP stored info type."""

    name = "Cloud DLP Info Type Details"
    key = "cloud_dlp_info_type_details_key"
    format_str = DLP_INFO_TYPE_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, info_type_name: str):
        """Push the project id and info-type name needed to render this link into XCom."""
        link_params = {"project_id": project_id, "info_type_name": info_type_name}
        task_instance.xcom_push(
            context=context,
            key=CloudDLPInfoTypeDetailsLink.key,
            value=link_params,
        )
class CloudDLPPossibleInfoTypesListLink(BaseGoogleLink):
    """Link to the catalog of built-in DLP info types in the Google Cloud console."""

    name = "Cloud DLP Possible Info Types List"
    key = "cloud_dlp_possible_info_types_list_key"
    format_str = DLP_POSSIBLE_INFO_TYPES_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id needed to render this link into XCom."""
        task_instance.xcom_push(
            context=context,
            key=CloudDLPPossibleInfoTypesListLink.key,
            value={"project_id": project_id},
        )
| 9,291 | 28.128527 | 109 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/vertex_ai.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# URL templates for Vertex AI console pages. Each ``{placeholder}`` is filled
# from the dict that the matching link class below pushes to XCom in ``persist``.
# NOTE(review): paths are relative — presumably resolved against the Google
# Cloud console base URL by ``BaseGoogleLink``; confirm against its format_link.
VERTEX_AI_BASE_LINK = "/vertex-ai"
VERTEX_AI_MODEL_LINK = (
    VERTEX_AI_BASE_LINK + "/locations/{region}/models/{model_id}/deploy?project={project_id}"
)
VERTEX_AI_MODEL_LIST_LINK = VERTEX_AI_BASE_LINK + "/models?project={project_id}"
# Export link points at the GCS browser, not the Vertex AI section.
VERTEX_AI_MODEL_EXPORT_LINK = "/storage/browser/{bucket_name}/model-{model_id}?project={project_id}"
VERTEX_AI_TRAINING_LINK = (
    VERTEX_AI_BASE_LINK + "/locations/{region}/training/{training_id}/cpu?project={project_id}"
)
VERTEX_AI_TRAINING_PIPELINES_LINK = VERTEX_AI_BASE_LINK + "/training/training-pipelines?project={project_id}"
VERTEX_AI_DATASET_LINK = (
    VERTEX_AI_BASE_LINK + "/locations/{region}/datasets/{dataset_id}/analyze?project={project_id}"
)
VERTEX_AI_DATASET_LIST_LINK = VERTEX_AI_BASE_LINK + "/datasets?project={project_id}"
VERTEX_AI_HYPERPARAMETER_TUNING_JOB_LIST_LINK = (
    VERTEX_AI_BASE_LINK + "/training/hyperparameter-tuning-jobs?project={project_id}"
)
VERTEX_AI_BATCH_PREDICTION_JOB_LINK = (
    VERTEX_AI_BASE_LINK
    + "/locations/{region}/batch-predictions/{batch_prediction_job_id}?project={project_id}"
)
VERTEX_AI_BATCH_PREDICTION_JOB_LIST_LINK = VERTEX_AI_BASE_LINK + "/batch-predictions?project={project_id}"
VERTEX_AI_ENDPOINT_LINK = (
    VERTEX_AI_BASE_LINK + "/locations/{region}/endpoints/{endpoint_id}?project={project_id}"
)
VERTEX_AI_ENDPOINT_LIST_LINK = VERTEX_AI_BASE_LINK + "/endpoints?project={project_id}"
class VertexAIModelLink(BaseGoogleLink):
    """Link to the deploy page of a Vertex AI model."""

    name = "Vertex AI Model"
    key = "model_conf"
    format_str = VERTEX_AI_MODEL_LINK

    @staticmethod
    def persist(context: Context, task_instance, model_id: str):
        """Push link parameters to XCom; region/project come from the operator."""
        link_params = {
            "model_id": model_id,
            "region": task_instance.region,
            "project_id": task_instance.project_id,
        }
        task_instance.xcom_push(context=context, key=VertexAIModelLink.key, value=link_params)
class VertexAIModelListLink(BaseGoogleLink):
    """Link to the list of Vertex AI models in a project."""

    name = "Model List"
    key = "models_conf"
    format_str = VERTEX_AI_MODEL_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=VertexAIModelListLink.key,
            value={"project_id": task_instance.project_id},
        )
class VertexAIModelExportLink(BaseGoogleLink):
    """Link to the GCS browser location a Vertex AI model was exported to."""

    name = "Export Model"
    key = "export_conf"
    format_str = VERTEX_AI_MODEL_EXPORT_LINK

    @staticmethod
    def extract_bucket_name(config):
        """Return the bucket name embedded in the export output configuration."""
        # Keep everything after the last ``gs://`` (the whole string if absent).
        uri_prefix = config["artifact_destination"]["output_uri_prefix"]
        return uri_prefix.rpartition("gs://")[-1]

    @staticmethod
    def persist(context: Context, task_instance):
        """Push model id, project and destination bucket to XCom."""
        link_params = {
            "project_id": task_instance.project_id,
            "model_id": task_instance.model_id,
            "bucket_name": VertexAIModelExportLink.extract_bucket_name(task_instance.output_config),
        }
        task_instance.xcom_push(context=context, key=VertexAIModelExportLink.key, value=link_params)
class VertexAITrainingLink(BaseGoogleLink):
    """Link to the page of a single Vertex AI custom training job."""

    name = "Vertex AI Training"
    key = "training_conf"
    format_str = VERTEX_AI_TRAINING_LINK

    @staticmethod
    def persist(context: Context, task_instance, training_id: str):
        """Push link parameters to XCom; region/project come from the operator."""
        link_params = {
            "training_id": training_id,
            "region": task_instance.region,
            "project_id": task_instance.project_id,
        }
        task_instance.xcom_push(context=context, key=VertexAITrainingLink.key, value=link_params)
class VertexAITrainingPipelinesLink(BaseGoogleLink):
    """Link to the list of Vertex AI training pipelines."""

    name = "Vertex AI Training Pipelines"
    key = "pipelines_conf"
    format_str = VERTEX_AI_TRAINING_PIPELINES_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=VertexAITrainingPipelinesLink.key,
            value={"project_id": task_instance.project_id},
        )
class VertexAIDatasetLink(BaseGoogleLink):
    """Link to the analyze page of a Vertex AI dataset."""

    name = "Dataset"
    key = "dataset_conf"
    format_str = VERTEX_AI_DATASET_LINK

    @staticmethod
    def persist(context: Context, task_instance, dataset_id: str):
        """Push link parameters to XCom; region/project come from the operator."""
        link_params = {
            "dataset_id": dataset_id,
            "region": task_instance.region,
            "project_id": task_instance.project_id,
        }
        task_instance.xcom_push(context=context, key=VertexAIDatasetLink.key, value=link_params)
class VertexAIDatasetListLink(BaseGoogleLink):
    """Link to the list of Vertex AI datasets in a project."""

    name = "Dataset List"
    key = "datasets_conf"
    format_str = VERTEX_AI_DATASET_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=VertexAIDatasetListLink.key,
            value={"project_id": task_instance.project_id},
        )
class VertexAIHyperparameterTuningJobListLink(BaseGoogleLink):
    """Link to the list of Vertex AI hyperparameter tuning jobs."""

    name = "Hyperparameter Tuning Job List"
    key = "hyperparameter_tuning_jobs_conf"
    format_str = VERTEX_AI_HYPERPARAMETER_TUNING_JOB_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=VertexAIHyperparameterTuningJobListLink.key,
            value={"project_id": task_instance.project_id},
        )
class VertexAIBatchPredictionJobLink(BaseGoogleLink):
    """Link to a single Vertex AI batch prediction job."""

    name = "Batch Prediction Job"
    key = "batch_prediction_job_conf"
    format_str = VERTEX_AI_BATCH_PREDICTION_JOB_LINK

    @staticmethod
    def persist(context: Context, task_instance, batch_prediction_job_id: str):
        """Push link parameters to XCom; region/project come from the operator."""
        link_params = {
            "batch_prediction_job_id": batch_prediction_job_id,
            "region": task_instance.region,
            "project_id": task_instance.project_id,
        }
        task_instance.xcom_push(
            context=context,
            key=VertexAIBatchPredictionJobLink.key,
            value=link_params,
        )
class VertexAIBatchPredictionJobListLink(BaseGoogleLink):
    """Link to the list of Vertex AI batch prediction jobs."""

    name = "Batch Prediction Job List"
    key = "batch_prediction_jobs_conf"
    format_str = VERTEX_AI_BATCH_PREDICTION_JOB_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=VertexAIBatchPredictionJobListLink.key,
            value={"project_id": task_instance.project_id},
        )
class VertexAIEndpointLink(BaseGoogleLink):
    """Link to a single Vertex AI endpoint."""

    name = "Endpoint"
    key = "endpoint_conf"
    format_str = VERTEX_AI_ENDPOINT_LINK

    @staticmethod
    def persist(context: Context, task_instance, endpoint_id: str):
        """Push link parameters to XCom; region/project come from the operator."""
        link_params = {
            "endpoint_id": endpoint_id,
            "region": task_instance.region,
            "project_id": task_instance.project_id,
        }
        task_instance.xcom_push(context=context, key=VertexAIEndpointLink.key, value=link_params)
class VertexAIEndpointListLink(BaseGoogleLink):
    """Link to the list of Vertex AI endpoints in a project."""

    name = "Endpoint List"
    key = "endpoints_conf"
    format_str = VERTEX_AI_ENDPOINT_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=VertexAIEndpointListLink.key,
            value={"project_id": task_instance.project_id},
        )
| 9,696 | 29.114907 | 109 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/cloud_functions.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Cloud Functions links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# Absolute console URLs for the Cloud Functions section; ``{placeholder}``
# values come from the dict pushed to XCom by the link classes below.
CLOUD_FUNCTIONS_BASE_LINK = "https://console.cloud.google.com/functions"
CLOUD_FUNCTIONS_DETAILS_LINK = (
    CLOUD_FUNCTIONS_BASE_LINK + "/details/{location}/{function_name}?project={project_id}"
)
CLOUD_FUNCTIONS_LIST_LINK = CLOUD_FUNCTIONS_BASE_LINK + "/list?project={project_id}"
class CloudFunctionsDetailsLink(BaseGoogleLink):
    """Link to the details page of a single Cloud Function."""

    name = "Cloud Functions Details"
    key = "cloud_functions_details"
    format_str = CLOUD_FUNCTIONS_DETAILS_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        function_name: str,
        location: str,
        project_id: str,
    ):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {
            "function_name": function_name,
            "location": location,
            "project_id": project_id,
        }
        task_instance.xcom_push(context, key=CloudFunctionsDetailsLink.key, value=link_params)
class CloudFunctionsListLink(BaseGoogleLink):
    """Helper class for constructing Cloud Functions List Link."""

    name = "Cloud Functions List"
    key = "cloud_functions_list"
    format_str = CLOUD_FUNCTIONS_LIST_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        project_id: str,
    ):
        """Push the project id to XCom under this link's own key.

        Fix: this previously pushed under ``CloudFunctionsDetailsLink.key``
        (copy-paste bug), so the value was never found under
        ``cloud_functions_list`` and the list link could not be rendered.
        """
        task_instance.xcom_push(
            context,
            key=CloudFunctionsListLink.key,  # was CloudFunctionsDetailsLink.key — wrong key
            value={"project_id": project_id},
        )
| 2,529 | 30.234568 | 99 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/dataplex.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Dataplex links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# URL templates for Dataplex console pages; task/tasks paths are relative
# (presumably resolved by ``BaseGoogleLink`` — confirm), the lake link is absolute.
DATAPLEX_BASE_LINK = "/dataplex/process/tasks"
DATAPLEX_TASK_LINK = DATAPLEX_BASE_LINK + "/{lake_id}.{task_id};location={region}/jobs?project={project_id}"
DATAPLEX_TASKS_LINK = DATAPLEX_BASE_LINK + "?project={project_id}&qLake={lake_id}.{region}"
DATAPLEX_LAKE_LINK = (
    "https://console.cloud.google.com/dataplex/lakes/{lake_id};location={region}?project={project_id}"
)
class DataplexTaskLink(BaseGoogleLink):
    """Link to the jobs page of a single Dataplex task."""

    name = "Dataplex Task"
    key = "task_conf"
    format_str = DATAPLEX_TASK_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push link parameters read from the operator to XCom."""
        link_params = {
            "lake_id": task_instance.lake_id,
            "task_id": task_instance.dataplex_task_id,
            "region": task_instance.region,
            "project_id": task_instance.project_id,
        }
        task_instance.xcom_push(context=context, key=DataplexTaskLink.key, value=link_params)
class DataplexTasksLink(BaseGoogleLink):
    """Link to the filtered list of Dataplex tasks for a lake."""

    name = "Dataplex Tasks"
    key = "tasks_conf"
    format_str = DATAPLEX_TASKS_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push link parameters read from the operator to XCom."""
        link_params = {
            "project_id": task_instance.project_id,
            "lake_id": task_instance.lake_id,
            "region": task_instance.region,
        }
        task_instance.xcom_push(context=context, key=DataplexTasksLink.key, value=link_params)
class DataplexLakeLink(BaseGoogleLink):
    """Link to the console page of a single Dataplex lake."""

    name = "Dataplex Lake"
    key = "dataplex_lake_key"
    format_str = DATAPLEX_LAKE_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push link parameters read from the operator to XCom."""
        link_params = {
            "lake_id": task_instance.lake_id,
            "region": task_instance.region,
            "project_id": task_instance.project_id,
        }
        task_instance.xcom_push(context=context, key=DataplexLakeLink.key, value=link_params)
| 3,229 | 29.761905 | 108 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/datastore.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# URL templates for Datastore console pages.
DATASTORE_BASE_LINK = "/datastore"
DATASTORE_IMPORT_EXPORT_LINK = DATASTORE_BASE_LINK + "/import-export?project={project_id}"
# Export link points at the GCS browser; not referenced by a class in this file
# (presumably consumed by an export operator elsewhere — verify before removing).
DATASTORE_EXPORT_ENTITIES_LINK = "/storage/browser/{bucket_name}/{export_name}?project={project_id}"
DATASTORE_ENTITIES_LINK = DATASTORE_BASE_LINK + "/entities/query/kind?project={project_id}"
class CloudDatastoreImportExportLink(BaseGoogleLink):
    """Link to the Datastore import/export page of a project."""

    name = "Import/Export Page"
    key = "import_export_conf"
    format_str = DATASTORE_IMPORT_EXPORT_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=CloudDatastoreImportExportLink.key,
            value={"project_id": task_instance.project_id},
        )
class CloudDatastoreEntitiesLink(BaseGoogleLink):
    """Link to the Datastore entities query page of a project."""

    name = "Entities"
    key = "entities_conf"
    format_str = DATASTORE_ENTITIES_LINK

    @staticmethod
    def persist(context: Context, task_instance):
        """Push the operator's project id to XCom for link rendering."""
        task_instance.xcom_push(
            context=context,
            key=CloudDatastoreEntitiesLink.key,
            value={"project_id": task_instance.project_id},
        )
| 2,374 | 31.986111 | 100 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/mlengine.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google ML Engine links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# Absolute console URLs for AI Platform (ML Engine) pages; ``{placeholder}``
# values come from the dict pushed to XCom by the link classes below.
MLENGINE_BASE_LINK = "https://console.cloud.google.com/ai-platform"
MLENGINE_MODEL_DETAILS_LINK = MLENGINE_BASE_LINK + "/models/{model_id}/versions?project={project_id}"
MLENGINE_MODEL_VERSION_DETAILS_LINK = (
    MLENGINE_BASE_LINK + "/models/{model_id}/versions/{version_id}/performance?project={project_id}"
)
MLENGINE_MODELS_LIST_LINK = MLENGINE_BASE_LINK + "/models/?project={project_id}"
MLENGINE_JOB_DETAILS_LINK = MLENGINE_BASE_LINK + "/jobs/{job_id}?project={project_id}"
MLENGINE_JOBS_LIST_LINK = MLENGINE_BASE_LINK + "/jobs?project={project_id}"
class MLEngineModelLink(BaseGoogleLink):
    """Link to the versions page of an ML Engine model."""

    name = "MLEngine Model"
    key = "ml_engine_model"
    format_str = MLENGINE_MODEL_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, model_id: str, project_id: str):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"model_id": model_id, "project_id": project_id}
        task_instance.xcom_push(context, key=MLEngineModelLink.key, value=link_params)
class MLEngineModelsListLink(BaseGoogleLink):
    """Link to the list of ML Engine models in a project."""

    name = "MLEngine Models List"
    key = "ml_engine_models_list"
    format_str = MLENGINE_MODELS_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id to XCom for later link rendering."""
        task_instance.xcom_push(
            context,
            key=MLEngineModelsListLink.key,
            value={"project_id": project_id},
        )
class MLEngineJobDetailsLink(BaseGoogleLink):
    """Link to the details page of a single ML Engine job."""

    name = "MLEngine Job Details"
    key = "ml_engine_job_details"
    format_str = MLENGINE_JOB_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, job_id: str, project_id: str):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"job_id": job_id, "project_id": project_id}
        task_instance.xcom_push(context, key=MLEngineJobDetailsLink.key, value=link_params)
class MLEngineModelVersionDetailsLink(BaseGoogleLink):
    """Link to the performance page of a specific ML Engine model version."""

    name = "MLEngine Version Details"
    key = "ml_engine_version_details"
    format_str = MLENGINE_MODEL_VERSION_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, model_id: str, project_id: str, version_id: str):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {
            "model_id": model_id,
            "project_id": project_id,
            "version_id": version_id,
        }
        task_instance.xcom_push(context, key=MLEngineModelVersionDetailsLink.key, value=link_params)
class MLEngineJobSListLink(BaseGoogleLink):
    """Link to the list of ML Engine jobs in a project.

    NOTE: the class name's odd capitalization ("JobS") is kept — it is the
    public name referenced by operators elsewhere.
    """

    name = "MLEngine Jobs List"
    key = "ml_engine_jobs_list"
    format_str = MLENGINE_JOBS_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str):
        """Push the project id to XCom for later link rendering."""
        task_instance.xcom_push(
            context,
            key=MLEngineJobSListLink.key,
            value={"project_id": project_id},
        )
| 4,222 | 28.950355 | 101 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/datacatalog.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Data Catalog links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# URL templates for Data Catalog console pages; ``{placeholder}`` values come
# from the dict pushed to XCom by the link classes below.
DATACATALOG_BASE_LINK = "/datacatalog"
ENTRY_GROUP_LINK = (
    DATACATALOG_BASE_LINK
    + "/groups/{entry_group_id};container={project_id};location={location_id}?project={project_id}"
)
# The trailing backslash is a line continuation INSIDE the string literal:
# the URL continues with "?project=..." and contains no newline.
ENTRY_LINK = (
    DATACATALOG_BASE_LINK
    + "/projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}\
?project={project_id}"
)
TAG_TEMPLATE_LINK = (
    DATACATALOG_BASE_LINK
    + "/projects/{project_id}/locations/{location_id}/tagTemplates/{tag_template_id}?project={project_id}"
)
class DataCatalogEntryGroupLink(BaseGoogleLink):
    """Link to a Data Catalog entry group."""

    name = "Data Catalog Entry Group"
    key = "data_catalog_entry_group"
    format_str = ENTRY_GROUP_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        entry_group_id: str,
        location_id: str,
        project_id: str | None,
    ):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {
            "entry_group_id": entry_group_id,
            "location_id": location_id,
            "project_id": project_id,
        }
        task_instance.xcom_push(context, key=DataCatalogEntryGroupLink.key, value=link_params)
class DataCatalogEntryLink(BaseGoogleLink):
    """Link to a single Data Catalog entry inside its entry group."""

    name = "Data Catalog Entry"
    key = "data_catalog_entry"
    format_str = ENTRY_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        entry_id: str,
        entry_group_id: str,
        location_id: str,
        project_id: str | None,
    ):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {
            "entry_id": entry_id,
            "entry_group_id": entry_group_id,
            "location_id": location_id,
            "project_id": project_id,
        }
        task_instance.xcom_push(context, key=DataCatalogEntryLink.key, value=link_params)
class DataCatalogTagTemplateLink(BaseGoogleLink):
    """Link to a Data Catalog tag template."""

    name = "Data Catalog Tag Template"
    key = "data_catalog_tag_template"
    format_str = TAG_TEMPLATE_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        tag_template_id: str,
        location_id: str,
        project_id: str | None,
    ):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {
            "tag_template_id": tag_template_id,
            "location_id": location_id,
            "project_id": project_id,
        }
        task_instance.xcom_push(context, key=DataCatalogTagTemplateLink.key, value=link_params)
| 3,635 | 30.617391 | 109 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/cloud_build.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# URL templates for Cloud Build console pages; ``{placeholder}`` values come
# from the dict pushed to XCom by the link classes below.
BUILD_BASE_LINK = "/cloud-build"
BUILD_LINK = BUILD_BASE_LINK + "/builds;region={region}/{build_id}?project={project_id}"
BUILD_LIST_LINK = BUILD_BASE_LINK + "/builds;region={region}?project={project_id}"
BUILD_TRIGGERS_LIST_LINK = BUILD_BASE_LINK + "/triggers;region={region}?project={project_id}"
BUILD_TRIGGER_DETAILS_LINK = (
    BUILD_BASE_LINK + "/triggers;region={region}/edit/{trigger_id}?project={project_id}"
)
class CloudBuildLink(BaseGoogleLink):
    """Link to the details page of a single Cloud Build build."""

    name = "Cloud Build Details"
    key = "cloud_build_key"
    format_str = BUILD_LINK

    @staticmethod
    def persist(context: Context, task_instance, build_id: str, project_id: str, region: str):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"project_id": project_id, "region": region, "build_id": build_id}
        task_instance.xcom_push(context=context, key=CloudBuildLink.key, value=link_params)
class CloudBuildListLink(BaseGoogleLink):
    """Link to the list of Cloud Build builds for a region."""

    name = "Cloud Builds List"
    key = "cloud_build_list_key"
    format_str = BUILD_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, region: str):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"project_id": project_id, "region": region}
        task_instance.xcom_push(context=context, key=CloudBuildListLink.key, value=link_params)
class CloudBuildTriggersListLink(BaseGoogleLink):
    """Link to the list of Cloud Build triggers for a region."""

    name = "Cloud Build Triggers List"
    key = "cloud_build_triggers_list_key"
    format_str = BUILD_TRIGGERS_LIST_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, region: str):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"project_id": project_id, "region": region}
        task_instance.xcom_push(
            context=context,
            key=CloudBuildTriggersListLink.key,
            value=link_params,
        )
class CloudBuildTriggerDetailsLink(BaseGoogleLink):
    """Link to the edit page of a single Cloud Build trigger."""

    name = "Cloud Build Triggers Details"
    key = "cloud_build_triggers_details_key"
    format_str = BUILD_TRIGGER_DETAILS_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, region: str, trigger_id: str):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {
            "project_id": project_id,
            "region": region,
            "trigger_id": trigger_id,
        }
        task_instance.xcom_push(
            context=context,
            key=CloudBuildTriggerDetailsLink.key,
            value=link_params,
        )
| 3,902 | 27.489051 | 93 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/pubsub.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Pub/Sub links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# URL templates for Pub/Sub console pages.
PUBSUB_BASE_LINK = "/cloudpubsub"
PUBSUB_TOPIC_LINK = PUBSUB_BASE_LINK + "/topic/detail/{topic_id}?project={project_id}"
PUBSUB_SUBSCRIPTION_LINK = PUBSUB_BASE_LINK + "/subscription/detail/{subscription_id}?project={project_id}"
class PubSubTopicLink(BaseGoogleLink):
    """Link to the detail page of a Pub/Sub topic."""

    name = "Pub/Sub Topic"
    key = "pubsub_topic"
    format_str = PUBSUB_TOPIC_LINK

    @staticmethod
    def persist(context: Context, task_instance: BaseOperator, topic_id: str, project_id: str | None):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"topic_id": topic_id, "project_id": project_id}
        task_instance.xcom_push(context, key=PubSubTopicLink.key, value=link_params)
class PubSubSubscriptionLink(BaseGoogleLink):
    """Link to the detail page of a Pub/Sub subscription."""

    name = "Pub/Sub Subscription"
    key = "pubsub_subscription"
    format_str = PUBSUB_SUBSCRIPTION_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        subscription_id: str | None,
        project_id: str | None,
    ):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"subscription_id": subscription_id, "project_id": project_id}
        task_instance.xcom_push(context, key=PubSubSubscriptionLink.key, value=link_params)
| 2,418 | 31.689189 | 107 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/stackdriver.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Stackdriver links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# URL templates for Cloud Monitoring (Stackdriver) alerting pages.
STACKDRIVER_BASE_LINK = "/monitoring/alerting"
STACKDRIVER_NOTIFICATIONS_LINK = STACKDRIVER_BASE_LINK + "/notifications?project={project_id}"
STACKDRIVER_POLICIES_LINK = STACKDRIVER_BASE_LINK + "/policies?project={project_id}"
class StackdriverNotificationsLink(BaseGoogleLink):
    """Link to the Cloud Monitoring notification channels page.

    NOTE: unlike most link helpers in this package, ``persist`` takes the
    operator first and the context second — kept for caller compatibility.
    """

    name = "Cloud Monitoring Notifications"
    key = "stackdriver_notifications"
    format_str = STACKDRIVER_NOTIFICATIONS_LINK

    @staticmethod
    def persist(operator_instance: BaseOperator, context: Context, project_id: str | None):
        """Push the project id to XCom for later link rendering."""
        operator_instance.xcom_push(
            context,
            key=StackdriverNotificationsLink.key,
            value={"project_id": project_id},
        )
class StackdriverPoliciesLink(BaseGoogleLink):
    """Link to the Cloud Monitoring alerting policies page.

    NOTE: ``persist`` takes the operator first and the context second,
    mirroring :class:`StackdriverNotificationsLink`.
    """

    name = "Cloud Monitoring Policies"
    key = "stackdriver_policies"
    format_str = STACKDRIVER_POLICIES_LINK

    @staticmethod
    def persist(operator_instance: BaseOperator, context: Context, project_id: str | None):
        """Push the project id to XCom for later link rendering."""
        operator_instance.xcom_push(
            context,
            key=StackdriverPoliciesLink.key,
            value={"project_id": project_id},
        )
| 2,408 | 32.458333 | 94 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/dataprep.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
# Absolute URL templates for Dataprep (clouddataprep.com) pages; note the
# query parameter is camelCase ``projectId`` here, unlike the console links.
BASE_LINK = "https://clouddataprep.com"
DATAPREP_FLOW_LINK = BASE_LINK + "/flows/{flow_id}?projectId={project_id}"
DATAPREP_JOB_GROUP_LINK = BASE_LINK + "/jobs/{job_group_id}?projectId={project_id}"
class DataprepFlowLink(BaseGoogleLink):
    """Link to the details page of a Dataprep flow."""

    name = "Flow details page"
    key = "dataprep_flow_page"
    format_str = DATAPREP_FLOW_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, flow_id: int):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"project_id": project_id, "flow_id": flow_id}
        task_instance.xcom_push(context=context, key=DataprepFlowLink.key, value=link_params)
class DataprepJobGroupLink(BaseGoogleLink):
    """Link to the details page of a Dataprep job group."""

    name = "Job group details page"
    key = "dataprep_job_group_page"
    format_str = DATAPREP_JOB_GROUP_LINK

    @staticmethod
    def persist(context: Context, task_instance, project_id: str, job_group_id: int):
        """Push the URL template parameters to XCom for later link rendering."""
        link_params = {"project_id": project_id, "job_group_id": job_group_id}
        task_instance.xcom_push(context=context, key=DataprepJobGroupLink.key, value=link_params)
airflow | airflow-main/airflow/providers/google/cloud/links/bigquery.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google BigQuery links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
BIGQUERY_BASE_LINK = "/bigquery"
BIGQUERY_DATASET_LINK = (
BIGQUERY_BASE_LINK + "?referrer=search&project={project_id}&d={dataset_id}&p={project_id}&page=dataset"
)
BIGQUERY_TABLE_LINK = (
BIGQUERY_BASE_LINK
+ "?referrer=search&project={project_id}&d={dataset_id}&p={project_id}&page=table&t={table_id}"
)
class BigQueryDatasetLink(BaseGoogleLink):
    """Link to a BigQuery dataset page in the Google Cloud console."""

    name = "BigQuery Dataset"
    key = "bigquery_dataset"
    format_str = BIGQUERY_DATASET_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        dataset_id: str,
        project_id: str,
    ):
        """Push dataset/project identifiers to XCom for later URL rendering."""
        payload = {"dataset_id": dataset_id, "project_id": project_id}
        task_instance.xcom_push(context, key=BigQueryDatasetLink.key, value=payload)
class BigQueryTableLink(BaseGoogleLink):
    """Link to a BigQuery table page in the Google Cloud console."""

    name = "BigQuery Table"
    key = "bigquery_table"
    format_str = BIGQUERY_TABLE_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        project_id: str,
        table_id: str,
        dataset_id: str | None = None,
    ):
        """Push table identifiers to XCom; dataset_id may be None if unknown."""
        payload = {
            "dataset_id": dataset_id,
            "project_id": project_id,
            "table_id": table_id,
        }
        task_instance.xcom_push(context, key=BigQueryTableLink.key, value=payload)
| 2,527 | 30.6 | 107 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/spanner.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Spanner links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
SPANNER_BASE_LINK = "/spanner/instances"
SPANNER_INSTANCE_LINK = SPANNER_BASE_LINK + "/{instance_id}/details/databases?project={project_id}"
SPANNER_DATABASE_LINK = (
SPANNER_BASE_LINK + "/{instance_id}/databases/{database_id}/details/tables?project={project_id}"
)
class SpannerInstanceLink(BaseGoogleLink):
    """Link to a Cloud Spanner instance page in the Google Cloud console."""

    name = "Spanner Instance"
    key = "spanner_instance"
    format_str = SPANNER_INSTANCE_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        instance_id: str,
        project_id: str | None,
    ):
        """Push instance/project identifiers to XCom for later URL rendering."""
        payload = {"instance_id": instance_id, "project_id": project_id}
        task_instance.xcom_push(context, key=SpannerInstanceLink.key, value=payload)
class SpannerDatabaseLink(BaseGoogleLink):
    """Link to a Cloud Spanner database page in the Google Cloud console."""

    name = "Spanner Database"
    key = "spanner_database"
    format_str = SPANNER_DATABASE_LINK

    @staticmethod
    def persist(
        context: Context,
        task_instance: BaseOperator,
        instance_id: str,
        database_id: str,
        project_id: str | None,
    ):
        """Push instance/database/project identifiers to XCom for later URL rendering."""
        payload = {
            "instance_id": instance_id,
            "database_id": database_id,
            "project_id": project_id,
        }
        task_instance.xcom_push(context, key=SpannerDatabaseLink.key, value=payload)
| 2,505 | 31.545455 | 101 | py |
airflow | airflow-main/airflow/providers/google/cloud/links/cloud_tasks.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Cloud Tasks links."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.base import BaseGoogleLink
if TYPE_CHECKING:
from airflow.utils.context import Context
CLOUD_TASKS_BASE_LINK = "/cloudtasks"
CLOUD_TASKS_QUEUE_LINK = CLOUD_TASKS_BASE_LINK + "/queue/{location}/{queue_id}/tasks?project={project_id}"
CLOUD_TASKS_LINK = CLOUD_TASKS_BASE_LINK + "?project={project_id}"
class CloudTasksQueueLink(BaseGoogleLink):
    """Helper class for constructing a Cloud Tasks queue console link."""

    name = "Cloud Tasks Queue"
    key = "cloud_task_queue"
    format_str = CLOUD_TASKS_QUEUE_LINK

    @staticmethod
    def extract_parts(queue_name: str | None) -> tuple[str, str, str]:
        """Extract ``(project_id, location, queue_id)`` from a queue resource name.

        Expected format: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``.

        :param queue_name: Fully-qualified queue resource name, or ``None``.
        :return: The three identifiers, or ``("", "", "")`` when *queue_name* is
            missing or does not match the expected format.
        """
        if not queue_name:
            return "", "", ""
        parts = queue_name.split("/")
        # A well-formed resource name has 6 segments; guard against malformed
        # input instead of raising IndexError on parts[5].
        if len(parts) < 6:
            return "", "", ""
        return parts[1], parts[3], parts[5]

    @staticmethod
    def persist(
        operator_instance: BaseOperator,
        context: Context,
        queue_name: str | None,
    ):
        """Push the queue's identifying parts to XCom for later URL rendering."""
        project_id, location, queue_id = CloudTasksQueueLink.extract_parts(queue_name)
        operator_instance.xcom_push(
            context,
            key=CloudTasksQueueLink.key,
            value={"project_id": project_id, "location": location, "queue_id": queue_id},
        )
class CloudTasksLink(BaseGoogleLink):
    """Link to the Cloud Tasks overview page in the Google Cloud console."""

    name = "Cloud Tasks"
    key = "cloud_task"
    format_str = CLOUD_TASKS_LINK

    @staticmethod
    def persist(
        operator_instance: BaseOperator,
        context: Context,
        project_id: str | None,
    ):
        """Push the project identifier to XCom for later URL rendering."""
        payload = {"project_id": project_id}
        operator_instance.xcom_push(context, key=CloudTasksLink.key, value=payload)
| 2,770 | 31.6 | 106 | py |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.