25
25
from sqlalchemy .orm import Session
26
26
27
27
from airflow .models import DagBag
28
- from airflow .models .asset import AssetModel
28
+ from airflow .models .asset import AssetAliasModel , AssetEvent , AssetModel
29
29
from airflow .providers .standard .operators .empty import EmptyOperator
30
30
from airflow .providers .standard .operators .trigger_dagrun import TriggerDagRunOperator
31
31
from airflow .providers .standard .sensors .external_task import ExternalTaskSensor
32
+ from airflow .sdk import Metadata , task
32
33
from airflow .sdk .definitions .asset import Asset , AssetAlias , Dataset
34
+ from airflow .utils import timezone
33
35
34
- from tests_common .test_utils .db import clear_db_runs
36
+ from tests_common .test_utils .db import clear_db_assets , clear_db_runs
35
37
36
38
pytestmark = pytest .mark .db_test
37
39
38
40
DAG_ID = "dag_with_multiple_versions"
39
41
DAG_ID_EXTERNAL_TRIGGER = "external_trigger"
42
+ DAG_ID_RESOLVED_ASSET_ALIAS = "dag_with_resolved_asset_alias"
40
43
LATEST_VERSION_DAG_RESPONSE : dict = {
41
44
"edges" : [],
42
45
"nodes" : [
@@ -95,8 +98,10 @@ def examples_dag_bag() -> DagBag:
95
98
@pytest.fixture(autouse=True)
def clean():
    """Wipe DagRun and Asset tables before and after every test in this module."""
    _cleaners = (clear_db_runs, clear_db_assets)
    for cleaner in _cleaners:
        cleaner()
    yield
    for cleaner in _cleaners:
        cleaner()
100
105
101
106
102
107
@pytest .fixture
@@ -115,15 +120,14 @@ def asset3() -> Dataset:
115
120
116
121
117
122
@pytest .fixture
118
- def make_dag (dag_maker , session , time_machine , asset1 : Asset , asset2 : Asset , asset3 : Dataset ) -> None :
123
+ def make_dags (dag_maker , session , time_machine , asset1 : Asset , asset2 : Asset , asset3 : Dataset ) -> None :
119
124
with dag_maker (
120
125
dag_id = DAG_ID_EXTERNAL_TRIGGER ,
121
126
serialized = True ,
122
127
session = session ,
123
128
start_date = pendulum .DateTime (2023 , 2 , 1 , 0 , 0 , 0 , tzinfo = pendulum .UTC ),
124
129
):
125
130
TriggerDagRunOperator (task_id = "trigger_dag_run_operator" , trigger_dag_id = DAG_ID )
126
-
127
131
dag_maker .sync_dagbag_to_db ()
128
132
129
133
with dag_maker (
@@ -138,7 +142,45 @@ def make_dag(dag_maker, session, time_machine, asset1: Asset, asset2: Asset, ass
138
142
>> ExternalTaskSensor (task_id = "external_task_sensor" , external_dag_id = DAG_ID )
139
143
>> EmptyOperator (task_id = "task_2" )
140
144
)
145
+ dag_maker .sync_dagbag_to_db ()
146
+
147
+ with dag_maker (
148
+ dag_id = DAG_ID_RESOLVED_ASSET_ALIAS ,
149
+ serialized = True ,
150
+ session = session ,
151
+ start_date = pendulum .DateTime (2023 , 2 , 1 , 0 , 0 , 0 , tzinfo = pendulum .UTC ),
152
+ ):
153
+
154
+ @task (outlets = [AssetAlias ("example-alias-resolved" )])
155
+ def task_1 (** context ):
156
+ yield Metadata (
157
+ asset = Asset ("resolved_example_asset_alias" ),
158
+ extra = {"k" : "v" }, # extra has to be provided, can be {}
159
+ alias = AssetAlias ("example-alias-resolved" ),
160
+ )
141
161
162
+ task_1 () >> EmptyOperator (task_id = "task_2" )
163
+
164
+ dr = dag_maker .create_dagrun ()
165
+ asset_alias = session .scalar (
166
+ select (AssetAliasModel ).where (AssetAliasModel .name == "example-alias-resolved" )
167
+ )
168
+ asset_model = AssetModel (name = "resolved_example_asset_alias" )
169
+ session .add (asset_model )
170
+ session .flush ()
171
+ asset_alias .assets .append (asset_model )
172
+ asset_alias .asset_events .append (
173
+ AssetEvent (
174
+ id = 1 ,
175
+ timestamp = timezone .parse ("2021-01-01T00:00:00" ),
176
+ asset_id = asset_model .id ,
177
+ source_dag_id = DAG_ID_RESOLVED_ASSET_ALIAS ,
178
+ source_task_id = "task_1" ,
179
+ source_run_id = dr .run_id ,
180
+ source_map_index = - 1 ,
181
+ )
182
+ )
183
+ session .commit ()
142
184
dag_maker .sync_dagbag_to_db ()
143
185
144
186
@@ -151,17 +193,17 @@ def _fetch_asset_id(asset: Asset, session: Session) -> str:
151
193
152
194
153
195
@pytest.fixture
def asset1_id(make_dags, asset1, session: Session) -> str:
    """Database id of ``asset1``, registered as a side effect of ``make_dags``."""
    return _fetch_asset_id(asset=asset1, session=session)
156
198
157
199
158
200
@pytest.fixture
def asset2_id(make_dags, asset2, session) -> str:
    """Database id of ``asset2``, registered as a side effect of ``make_dags``."""
    return _fetch_asset_id(asset=asset2, session=session)
161
203
162
204
163
205
@pytest.fixture
def asset3_id(make_dags, asset3, session) -> str:
    """Database id of ``asset3`` (a ``Dataset``), registered by ``make_dags``."""
    return _fetch_asset_id(asset=asset3, session=session)
166
208
167
209
@@ -296,13 +338,13 @@ class TestStructureDataEndpoint:
296
338
),
297
339
],
298
340
)
299
- @pytest .mark .usefixtures ("make_dag " )
341
+ @pytest .mark .usefixtures ("make_dags " )
300
342
def test_should_return_200 (self , test_client , params , expected ):
301
343
response = test_client .get ("/structure/structure_data" , params = params )
302
344
assert response .status_code == 200
303
345
assert response .json () == expected
304
346
305
- @pytest .mark .usefixtures ("make_dag " )
347
+ @pytest .mark .usefixtures ("make_dags " )
306
348
def test_should_return_200_with_asset (self , test_client , asset1_id , asset2_id , asset3_id ):
307
349
params = {
308
350
"dag_id" : DAG_ID ,
@@ -492,6 +534,75 @@ def test_should_return_200_with_asset(self, test_client, asset1_id, asset2_id, a
492
534
assert response .status_code == 200
493
535
assert response .json () == expected
494
536
537
+ @pytest .mark .usefixtures ("make_dags" )
538
+ def test_should_return_200_with_resolved_asset_alias_attached_to_the_corrrect_producing_task (
539
+ self , test_client , session
540
+ ):
541
+ resolved_asset = session .scalar (
542
+ session .query (AssetModel ).filter_by (name = "resolved_example_asset_alias" )
543
+ )
544
+ params = {
545
+ "dag_id" : DAG_ID_RESOLVED_ASSET_ALIAS ,
546
+ "external_dependencies" : True ,
547
+ }
548
+ expected = {
549
+ "edges" : [
550
+ {
551
+ "source_id" : "task_1" ,
552
+ "target_id" : "task_2" ,
553
+ "is_setup_teardown" : None ,
554
+ "label" : None ,
555
+ "is_source_asset" : None ,
556
+ },
557
+ {
558
+ "source_id" : "task_1" ,
559
+ "target_id" : f"asset:{ resolved_asset .id } " ,
560
+ "is_setup_teardown" : None ,
561
+ "label" : None ,
562
+ "is_source_asset" : None ,
563
+ },
564
+ ],
565
+ "nodes" : [
566
+ {
567
+ "id" : "task_1" ,
568
+ "label" : "task_1" ,
569
+ "type" : "task" ,
570
+ "children" : None ,
571
+ "is_mapped" : None ,
572
+ "tooltip" : None ,
573
+ "setup_teardown_type" : None ,
574
+ "operator" : "@task" ,
575
+ "asset_condition_type" : None ,
576
+ },
577
+ {
578
+ "id" : "task_2" ,
579
+ "label" : "task_2" ,
580
+ "type" : "task" ,
581
+ "children" : None ,
582
+ "is_mapped" : None ,
583
+ "tooltip" : None ,
584
+ "setup_teardown_type" : None ,
585
+ "operator" : "EmptyOperator" ,
586
+ "asset_condition_type" : None ,
587
+ },
588
+ {
589
+ "id" : f"asset:{ resolved_asset .id } " ,
590
+ "label" : "resolved_example_asset_alias" ,
591
+ "type" : "asset" ,
592
+ "children" : None ,
593
+ "is_mapped" : None ,
594
+ "tooltip" : None ,
595
+ "setup_teardown_type" : None ,
596
+ "operator" : None ,
597
+ "asset_condition_type" : None ,
598
+ },
599
+ ],
600
+ }
601
+
602
+ response = test_client .get ("/structure/structure_data" , params = params )
603
+ assert response .status_code == 200
604
+ assert response .json () == expected
605
+
495
606
@pytest .mark .parametrize (
496
607
"params, expected" ,
497
608
[
0 commit comments