apiVersion: "sparkoperator.k8s.io/v1beta2" kind: SparkApplication metadata: name: wordcount-s3 namespace: default spec: type: Scala mode: cluster image: "seedjeffwan/spark:v2.4.5" imagePullPolicy: Always mainClass: org.apache.spark.examples.JavaWordCount mainApplicationFile: "local:///opt/spark/examples/jars/spark-examples_2.11-2.4.5.jar" arguments: - s3a://spark-k8s-data/logs/ephemeral-storage-limit-range.yaml sparkVersion: "2.4.5" restartPolicy: type: Never sparkConf: # Option 2: S3 Suggest to use this way "spark.hadoop.fs.s3a.aws.credentials.provider": "com.amazonaws.auth.InstanceProfileCredentialsProvider" driver: cores: 1 coreLimit: "1024m" memory: "512m" serviceAccount: spark executor: cores: 1 instances: 1 memory: "1g"