import datetime
import os

import airflow
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

# Name of the database to run the query against, injected via environment.
# os.environ[...] (not .get) makes a missing variable fail loudly at DAG
# parse time rather than at task run time.
SQL_DATABASE = os.environ["SQL_DATABASE"]

# Example DAG that runs a SQL query against the Airflow metadata database.
# schedule_interval=None + catchup=False: the DAG only runs when triggered
# manually, so the query is not executed on a recurring schedule.
with airflow.DAG(
    "airflow_db_connection_example",
    start_date=datetime.datetime(2025, 1, 1),
    schedule_interval=None,
    catchup=False,
) as dag:
    # Read-only sample query via the "airflow_db" connection: fetch the
    # first 10 rows of the DAG metadata table.
    SQLExecuteQueryOperator(
        task_id="run_airflow_db_query",
        dag=dag,
        conn_id="airflow_db",
        database=SQL_DATABASE,
        sql="SELECT * FROM dag LIMIT 10;",
    )
[[["容易理解","easyToUnderstand","thumb-up"],["確實解決了我的問題","solvedMyProblem","thumb-up"],["其他","otherUp","thumb-up"]],[["難以理解","hardToUnderstand","thumb-down"],["資訊或程式碼範例有誤","incorrectInformationOrSampleCode","thumb-down"],["缺少我需要的資訊/範例","missingTheInformationSamplesINeed","thumb-down"],["翻譯問題","translationIssue","thumb-down"],["其他","otherDown","thumb-down"]],["上次更新時間:2025-06-16 (世界標準時間)。"],[[["This page outlines how to connect to and query the Cloud SQL instance that hosts the Airflow database for Cloud Composer environments."],["While direct access to the Airflow database is possible, it is generally recommended to utilize the Airflow REST API or CLI commands instead."],["You can execute SQL queries on the Airflow database by creating a DAG with `SQLExecuteQueryOperator` operators and specifying your SQL query in the `sql` parameter, while setting schedule intervals accordingly to prevent multiple runs."],["Directly adding custom tables or modifying the schema of the Airflow database is strictly prohibited."],["Backing up the Airflow database contents should be done using snapshots rather than dumping database contents to a bucket."]]],[]]