Commit f81cd6f

Fix gcp remote log module import in airflow local settings (#49788)
1 parent 8276953 · commit f81cd6f

3 files changed: +35 -3 lines changed

airflow-core/src/airflow/config_templates/airflow_local_settings.py

Lines changed: 1 addition & 1 deletion

@@ -180,7 +180,7 @@
         )
         remote_task_handler_kwargs = {}
     elif remote_base_log_folder.startswith("gs://"):
-        from airflow.providers.google.cloud.logs.gcs_task_handler import GCSRemoteLogIO
+        from airflow.providers.google.cloud.log.gcs_task_handler import GCSRemoteLogIO
 
         key_path = conf.get_mandatory_value("logging", "google_key_path", fallback=None)
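
Why this one-word path change matters: the Google provider ships its log handlers under airflow.providers.google.cloud.log (singular), so the old ...cloud.logs import could not be resolved and configuring remote logging against a gs:// bucket failed with ModuleNotFoundError. The sketch below is a simplified, illustrative paraphrase of the dispatch in airflow_local_settings, not the module's actual code; it reuses only names visible in the diff above.

# Illustrative sketch (not the real airflow_local_settings module) of how the
# remote handler module is chosen from the configured log folder scheme.
from airflow.configuration import conf

remote_base_log_folder = conf.get("logging", "remote_base_log_folder", fallback="")

if remote_base_log_folder.startswith("gs://"):
    # Before this commit the import said "...cloud.logs..." (plural), a module that
    # does not exist in the Google provider, so this branch raised ModuleNotFoundError.
    from airflow.providers.google.cloud.log.gcs_task_handler import GCSRemoteLogIO

    key_path = conf.get_mandatory_value("logging", "google_key_path", fallback=None)
    # GCSRemoteLogIO is then built from key_path plus the [logging]
    # remote_task_handler_kwargs options exercised by the new test below.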

airflow-core/tests/unit/core/test_logging_config.py

Lines changed: 30 additions & 0 deletions

@@ -295,6 +295,36 @@ def test_log_group_arns_remote_logging_with_cloudwatch_handler(
         assert isinstance(remote_io, CloudWatchRemoteLogIO)
         assert remote_io.log_group_arn == log_group_arn
 
+    def test_loading_remote_logging_with_gcs_handler(self):
+        """Test if logging can be configured successfully for GCS"""
+        import airflow.logging_config
+        from airflow.config_templates import airflow_local_settings
+        from airflow.providers.google.cloud.log.gcs_task_handler import GCSRemoteLogIO
+
+        with conf_vars(
+            {
+                ("logging", "remote_logging"): "True",
+                ("logging", "remote_log_conn_id"): "some_gcs",
+                ("logging", "remote_base_log_folder"): "gs://some-folder",
+                ("logging", "google_key_path"): "/gcs-key.json",
+                (
+                    "logging",
+                    "remote_task_handler_kwargs",
+                ): '{"delete_local_copy": true, "project_id": "test-project", "gcp_keyfile_dict": {},"scopes": ["https://www.googleapis.com/auth/devstorage.read_write"]}',
+            }
+        ):
+            importlib.reload(airflow_local_settings)
+            airflow.logging_config.configure_logging()
+
+        assert isinstance(airflow.logging_config.REMOTE_TASK_LOG, GCSRemoteLogIO)
+        assert getattr(airflow.logging_config.REMOTE_TASK_LOG, "delete_local_copy") is True
+        assert getattr(airflow.logging_config.REMOTE_TASK_LOG, "project_id") == "test-project"
+        assert getattr(airflow.logging_config.REMOTE_TASK_LOG, "gcp_keyfile_dict") == {}
+        assert getattr(airflow.logging_config.REMOTE_TASK_LOG, "scopes") == [
+            "https://www.googleapis.com/auth/devstorage.read_write"
+        ]
+        assert getattr(airflow.logging_config.REMOTE_TASK_LOG, "gcp_key_path") == "/gcs-key.json"
+
     def test_loading_remote_logging_with_kwargs(self):
         """Test if logging can be configured successfully with kwargs"""
         pytest.importorskip("airflow.providers.amazon", reason="'amazon' provider not installed")
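
A note on the remote_task_handler_kwargs value the new test supplies: it is a JSON string that the settings machinery parses and forwards to the handler, which is why JSON true shows up as Python True on REMOTE_TASK_LOG. The snippet below only demonstrates that mapping with the standard library, under the assumption that plain JSON parsing is what happens under the hood; it is not the settings code itself.

# Standalone illustration of how the JSON kwargs from the test map onto the
# attribute values the assertions check (stdlib only, hypothetical variable names).
import json

kwargs = json.loads(
    '{"delete_local_copy": true, "project_id": "test-project", "gcp_keyfile_dict": {},'
    '"scopes": ["https://www.googleapis.com/auth/devstorage.read_write"]}'
)

assert kwargs["delete_local_copy"] is True  # JSON true -> Python True
assert kwargs["project_id"] == "test-project"
assert kwargs["gcp_keyfile_dict"] == {}
assert kwargs["scopes"] == ["https://www.googleapis.com/auth/devstorage.read_write"]
# gcp_key_path is not part of these kwargs; it comes from [logging] google_key_path,
# which is why the test checks it separately against "/gcs-key.json".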

providers/google/src/airflow/providers/google/cloud/log/gcs_task_handler.py

Lines changed: 4 additions & 2 deletions

@@ -61,13 +61,15 @@ class GCSRemoteLogIO(LoggingMixin):  # noqa: D101
     remote_base: str
     base_log_folder: Path = attrs.field(converter=Path)
     delete_local_copy: bool
+    project_id: str
 
     gcp_key_path: str | None
     gcp_keyfile_dict: dict | None
     scopes: Collection[str] | None
-    project_id: str
 
-    def upload(self, path: os.PathLike, ti: RuntimeTI):
+    processors = ()
+
+    def upload(self, path: os.PathLike | str, ti: RuntimeTI):
         """Upload the given log path to the remote storage."""
         path = Path(path)
         if path.is_absolute():
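
Two attrs behaviours explain the mechanics of this reordering: annotated class attributes become fields of the generated __init__ in declaration order, so moving project_id up beside delete_local_copy changes where it sits in the constructor, while an unannotated assignment such as processors = () stays a plain class attribute and is not collected as a field. The toy class below is made up for illustration; only the field names echo the diff.

# Toy attrs class (not Airflow code) showing field ordering and plain class
# attributes under @attrs.define.
from __future__ import annotations

import attrs


@attrs.define
class ExampleLogIO:
    delete_local_copy: bool
    project_id: str  # annotated -> an __init__ parameter, in declaration order
    gcp_key_path: str | None = None

    processors = ()  # unannotated -> shared class attribute, not an attrs field


io = ExampleLogIO(delete_local_copy=True, project_id="test-project")
print([f.name for f in attrs.fields(ExampleLogIO)])  # ['delete_local_copy', 'project_id', 'gcp_key_path']
print(io.processors)  # ()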
