diff --git a/docs/modules/spark-k8s/examples/getting_started/application.yaml b/docs/modules/spark-k8s/examples/getting_started/application.yaml
new file mode 100644
index 00000000..379f63ef
--- /dev/null
+++ b/docs/modules/spark-k8s/examples/getting_started/application.yaml
@@ -0,0 +1,36 @@
+---
+apiVersion: spark.stackable.tech/v1alpha1
+kind: SparkApplication
+metadata:
+  name: pyspark-pi # <1>
+  namespace: default
+spec:
+  sparkImage: # <2>
+    productVersion: 3.5.7
+  mode: cluster # <3>
+  mainApplicationFile: local:///stackable/spark/examples/src/main/python/pi.py # <4>
+  job: # <5>
+    config:
+      resources:
+        cpu:
+          min: "1"
+          max: "2"
+        memory:
+          limit: "1Gi"
+  driver: # <6>
+    config:
+      resources:
+        cpu:
+          min: "1"
+          max: "2"
+        memory:
+          limit: "1Gi"
+  executor: # <7>
+    replicas: 1
+    config:
+      resources:
+        cpu:
+          min: "1"
+          max: "2"
+        memory:
+          limit: "1Gi"
diff --git a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh
index 53d92ae7..0c012c5a 100755
--- a/docs/modules/spark-k8s/examples/getting_started/getting_started.sh
+++ b/docs/modules/spark-k8s/examples/getting_started/getting_started.sh
@@ -47,36 +47,7 @@ esac
 echo "Creating a Spark Application..."
 # tag::install-sparkapp[]
-kubectl apply -f - <<EOF

+<1> `metadata.name` contains the name of the SparkApplication.
+<2> `spec.sparkImage`: the image used by the job, driver and executor pods. This can be a custom image built by the user or an official Stackable image. The available official images are stored in the Stackable https://oci.stackable.tech/[image registry,window=_blank]. Information on how to browse the registry can be found xref:contributor:project-overview.adoc#docker-images[here,window=_blank].
+<3> `spec.mode`: only `cluster` is currently supported.
+<4> `spec.mainApplicationFile`: the artifact (Java, Scala or Python) that forms the basis of the Spark job. The path is relative to the image, so in this case an example Python script (which calculates the value of pi) is run; it is bundled with the Spark code and is therefore already present in the job image.
-* `spec.driver`: driver-specific settings.
-* `spec.executor`: executor-specific settings.
+<5> `spec.job`: settings specific to the submit command.
+<6> `spec.driver`: driver-specific settings.
+<7> `spec.executor`: executor-specific settings.

 == Verify that it works
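For reference, here is a rough sketch of how the application could be submitted and checked with plain `kubectl`. It assumes the `default` namespace and the `pyspark-pi` name from the manifest above; the CRD's plural and singular resource names and the exact status output are assumptions, not taken from this change.

[source,bash]
----
# Submit the SparkApplication defined in application.yaml (path as used in the diff above).
kubectl apply -f application.yaml

# Watch the job, driver and executor pods; for the pi example the driver pod
# should eventually reach the Completed state.
kubectl get pods -n default --watch

# Inspect the SparkApplication resource itself; the fields shown in the status
# section depend on the operator version.
kubectl get sparkapplications.spark.stackable.tech -n default
kubectl describe sparkapplication pyspark-pi -n default
----

Once the job has finished, the driver pod's log (for example via `kubectl logs <driver-pod>`) should contain a line similar to `Pi is roughly 3.14`, which is what the bundled `pi.py` example prints.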