Here is a code example of joining multiple S3 data sources in SQLake and applying simple enrichments to the data.

Run this code in SQLake (the connection name and source locations below are placeholders; adjust them for your environment):

/* Ingest data into SQLake */

-- Create a connection to the SQLake sample data source.
CREATE S3 CONNECTION upsolver_s3_samples                    -- placeholder connection name
    AWS_ROLE = 'arn:aws:iam::949275490180:role/upsolver_samples_role';

-- Create empty tables to use as staging for orders and sales.
CREATE TABLE default_glue_catalog.database_a137bd.orders_raw_data();
CREATE TABLE default_glue_catalog.database_a137bd.sales_info_raw_data();

-- Create streaming jobs to ingest raw orders and sales data into the staging tables.
CREATE SYNC JOB load_orders_raw_data_from_s3
    CONTENT_TYPE = JSON                                     -- assumed content type
    AS COPY FROM S3 upsolver_s3_samples
        LOCATION = 's3://upsolver-samples/orders/'          -- placeholder source location
    INTO default_glue_catalog.database_a137bd.orders_raw_data;

CREATE SYNC JOB load_sales_info_raw_data_from_s3
    CONTENT_TYPE = JSON                                     -- assumed content type
    AS COPY FROM S3 upsolver_s3_samples
        LOCATION = 's3://upsolver-samples/sales_info/'      -- placeholder source location
    INTO default_glue_catalog.database_a137bd.sales_info_raw_data;

A related Airflow snippet loads the DAGs folder with DagBag and uses inspect.getsource inside WriteDagToFile to capture the source code of a task function so the DAG can be written out to a file. The environment-variable names and the default_args definition did not survive, so the versions below are placeholders:

import os                                     # CORE: Operating System module for file system operations.
import inspect                                # CORE: Inspect module for getting source code of a function.
from datetime import datetime, timedelta      # CORE: Datetime module for date and time operations.
from airflow.models import DAG, DagBag                        # PIP: Airflow DAG and DagBag classes for DAG operations.
from airflow.operators.python_operator import PythonOperator  # PIP: Airflow PythonOperator for Python tasks.
from airflow.operators.bash_operator import BashOperator      # PIP: Airflow BashOperator for shell tasks.

os.environ['EXAMPLE_FLAG'] = 'True'     # hypothetical variable name; the original key was lost

default_args = {'start_date': datetime(2021, 1, 1)}    # assumed minimal default_args; the original definition was lost

dag = DAG('Amazing', default_args=default_args, schedule_interval=timedelta(days=1))
dagbag = DagBag(dag_folder=os.path.join(os.environ['AIRFLOW_HOME'], 'dags'))   # 'AIRFLOW_HOME' assumed; the original key was lost


def WriteDagToFile(_dag, _functionToExecute):
    # Capture the source code of the function that the task will execute.
    SourceCode = inspect.getsource(_functionToExecute)
    # The rest of the body, which apparently wired tasks together with
    # set_upstream(operators) before writing the DAG out, was truncated.


print(dagbag.dags, os.path.join(os.environ['AIRFLOW_HOME'], 'dags'))   # list the DAGs that DagBag discovered
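To cover the joining and enrichment step promised above, here is a minimal sketch of a SQLake transformation job that joins the two staging tables created earlier. The target table, join key, selected columns (orderid, nettotal, salesinfo.source), and job options are assumptions modeled on SQLake's public sample data, not statements from this post.

-- Target table for the joined, enriched records; columns other than the
-- partition column are inferred from the inserted data.
CREATE TABLE default_glue_catalog.database_a137bd.orders_enriched (
    partition_date date
)
PARTITIONED BY partition_date;

-- Join orders with sales info and write the result incrementally.
CREATE SYNC JOB join_orders_and_sales
    START_FROM = BEGINNING
    ADD_MISSING_COLUMNS = TRUE
    RUN_INTERVAL = 1 MINUTE
    AS INSERT INTO default_glue_catalog.database_a137bd.orders_enriched
    MAP_COLUMNS_BY_NAME
    SELECT
        o.orderid          AS order_id,
        o.nettotal         AS order_total,
        s.salesinfo.source AS sales_source,    -- simple enrichment pulled from the sales feed
        $event_date        AS partition_date
    FROM default_glue_catalog.database_a137bd.orders_raw_data AS o
    LEFT JOIN default_glue_catalog.database_a137bd.sales_info_raw_data AS s
        ON o.orderid = s.orderid
    WHERE $event_time BETWEEN run_start_time() AND run_end_time();

Filtering on $event_time between run_start_time() and run_end_time() keeps each execution incremental, so every run only processes records that arrived since the previous one.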