github.com/filecoin-project/bacalhau@v0.3.23-0.20230228154132-45c989550ace/integration/airflow/example_dags/example1.py

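"""Example Airflow DAG chaining two Bacalhau jobs.

The first job's output CIDs are pushed to XCom; the second job pulls
them and mounts them as an input volume alongside a static IPFS CID.
"""
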
from datetime import datetime

from airflow import DAG
from bacalhau_airflow.operators import BacalhauSubmitJobOperator

with DAG("run-me", start_date=datetime(2021, 1, 1)) as dag:
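    # First job: run an Ubuntu container that echoes "Hello". The operator
    # pushes the job's output CIDs to XCom under the key "cids" (pulled by
    # "run-2" below).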
    op1 = BacalhauSubmitJobOperator(
        task_id="run-1",
        api_version="V1beta1",
        job_spec=dict(
            engine="Docker",
            verifier="Noop",
            publisher="Estuary",
            docker=dict(
                image="ubuntu",
                entrypoint=["echo", "Hello"],
            ),
            deal=dict(concurrency=1, confidence=0, min_bids=0),
        ),
    )

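    # Second job: mounts a static IPFS dataset plus the CIDs produced by
    # "run-1", pulled from XCom when the input_volumes template is rendered.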
    op2 = BacalhauSubmitJobOperator(
        task_id="run-2",
        api_version="V1beta1",
        job_spec=dict(
            engine="Docker",
            verifier="Noop",
            publisher="Estuary",
            docker=dict(
                image="ubuntu",
                entrypoint=["echo", "World"],
            ),
            deal=dict(concurrency=1, confidence=0, min_bids=0),
            inputs=[
                dict(
                    cid="QmWG3ZCXTbdMUh6GWq2Pb1n7MMNxPQFa9NMswdZXuVKFUX",
                    path="/another-dataset",
                    storagesource="ipfs",
                )
            ],
        ),
        input_volumes=[
            "{{ task_instance.xcom_pull(task_ids='run-1', key='cids') }}:/datasets",
        ],
    )

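# run-1 must complete before run-2 starts.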
op1 >> op2
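
# A quick way to exercise this DAG locally (assumes Airflow and the
# bacalhau_airflow package are installed and this file is on the DAGs path):
#
#   airflow dags test run-me 2021-01-01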