github.com/argoproj/argo-cd/v2@v2.10.9/resource_customizations/sparkoperator.k8s.io/SparkApplication/testdata/healthy_dynamic_alloc_dstream.yaml

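# Test fixture for the SparkApplication health check: a running application with
# spark.streaming.dynamicAllocation enabled whose executor count (3 RUNNING) sits
# within the configured bounds (minExecutors=2, maxExecutors=10). Per the file name,
# this case is expected to be assessed as Healthy.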
apiVersion: sparkoperator.k8s.io/v1beta2
kind: SparkApplication
metadata:
  generation: 4
  labels:
    argocd.argoproj.io/instance: spark-job
  name: spark-job-app
  namespace: spark-cluster
  resourceVersion: "31812990"
  uid: bfee52b0-74ca-4465-8005-f6643097ed64
spec:
  executor:
    instances: 4
  sparkConf:
    spark.streaming.dynamicAllocation.enabled: 'true'
    spark.streaming.dynamicAllocation.maxExecutors: '10'
    spark.streaming.dynamicAllocation.minExecutors: '2'
status:
  applicationState:
    state: RUNNING
  driverInfo:
    podName: ingestion-datalake-news-app-driver
    webUIAddress: 172.20.207.161:4040
    webUIPort: 4040
    webUIServiceName: ingestion-datalake-news-app-ui-svc
  executionAttempts: 13
  executorState:
    ingestion-datalake-news-app-1591613851251-exec-1: RUNNING
    ingestion-datalake-news-app-1591613851251-exec-4: RUNNING
    ingestion-datalake-news-app-1591613851251-exec-6: RUNNING
  lastSubmissionAttemptTime: "2020-06-08T10:57:32Z"
  sparkApplicationId: spark-a5920b2a5aa04d22a737c60759b5bf82
  submissionAttempts: 1
  submissionID: 3e713ec8-9f6c-4e78-ac28-749797c846f0
  terminationTime: null