From f6728e183e1dbb758b66b13df80ed2d21b795018 Mon Sep 17 00:00:00 2001
From: Manabu Mccloskey
Date: Mon, 10 Jul 2023 17:29:30 -0700
Subject: [PATCH] add examples

---
 .../SparkAppArgoWorkflow/template.yaml        | 104 ++++++++++++++
 .../templates/catalog-info.yaml               |  15 ++
 .../SparkAppArgoWorkflow/templates/merge.json |   8 ++
 .../scaffolder/SparkAppUI/template.yaml       | 130 ++++++++++++++++++
 .../SparkAppUI/templates/catalog-info.yaml    |  15 ++
 .../SparkAppUI/templates/sparkJob.yaml        |  40 ++++++
 6 files changed, 312 insertions(+)
 create mode 100644 plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml
 create mode 100644 plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/catalog-info.yaml
 create mode 100644 plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/merge.json
 create mode 100644 plugins/apache-spark/examples/scaffolder/SparkAppUI/template.yaml
 create mode 100644 plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/catalog-info.yaml
 create mode 100644 plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/sparkJob.yaml

diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml
new file mode 100644
index 0000000..210dc98
--- /dev/null
+++ b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml
@@ -0,0 +1,104 @@
+apiVersion: scaffolder.backstage.io/v1beta3
+kind: Template
+metadata:
+  name: apache-spark-argo-workflows
+  title: Apache Spark Application with Argo Workflows
+  description: Creates an Apache Spark Application with Argo Workflows
+spec:
+  owner: guest
+  type: service
+  # these are the steps which are rendered in the frontend with the form input
+  parameters:
+    - title: Application details
+      required:
+        - name
+        - owner
+      properties:
+        name:
+          title: Application Name
+          type: string
+          description: Unique name of the component
+          ui:autofocus: true
+          ui:options:
+            rows: 5
+        owner:
+          title: Owner
+          type: string
+          description: Owner of the component
+          ui:field: OwnerPicker
+          ui:options:
+            catalogFilter:
+              kind: Group
+#        labels:
+#          title: Labels
+#          type: object
+#          additionalProperties:
+#            type: string
+#          description: Labels to apply to the application
+#          ui:autofocus: true
+        namespace:
+          title: Namespace
+          type: string
+          default: default
+          description: Namespace to deploy this application into. Optional. Defaults to "default".
+          ui:options:
+            rows: 5
+    - title: Spark Application manifest
+      required:
+        - manifest
+      properties:
+        manifest:
+          type: string
+          description: 'Paste your Spark Application manifest here'
+          "ui:widget": "textarea"
+          "ui:options": {
+            "inputType": "data",
+            "rows": 15
+          }
+          ui:autofocus: true
+          "ui:placeholder": |
+            apiVersion: "sparkoperator.k8s.io/v1beta2"
+            kind: SparkApplication
+            metadata:
+              name:
+            spec:
+  steps:
+    - id: fetch-base
+      name: Fetch Base
+      action: fetch:template
+      input:
+        url: ./templates
+        values:
+          name: ${{ parameters.name }}
+          namespace: ${{ parameters.namespace }}
+          clusterName: 'cnoe-packaging-2'
+#          labels: ${{ parameters.labels }}
+#          manifest: ${{ parameters.manifest }}
+    - id: write
+      name: write-to-file
+      action: roadiehq:utils:fs:write
+      input:
+        path: input.yaml
+        content: ${{ parameters.manifest }}
+    - id: parse
+      name: deserialize
+      action: roadiehq:utils:fs:parse
+      input:
+        path: input.yaml
+        parser: yaml
+    - id: merge
+      name: Merge
+      action: roadiehq:utils:json:merge
+      input:
+        path: merge.json
+        content: ${{ steps.parse.output.content }}
+    - id: register
+      name: Register
+      action: catalog:register
+      input:
+        catalogInfoPath: 'catalog-info.yaml'
+  output:
+    links:
+      - title: Open in catalog
+        icon: catalog
+        entityRef: ${{ steps['register'].output.entityRef }}
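A note on the write/parse/merge chain above: the pasted manifest arrives as a plain string, so it is first written out to input.yaml, then parsed back into a structured object that later steps can work with. As a minimal sketch (the application name spark-pi below is made up for illustration), a user might paste:

    apiVersion: "sparkoperator.k8s.io/v1beta2"
    kind: SparkApplication
    metadata:
      name: spark-pi
    spec:
      type: Python
      mode: cluster
      mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py"

The parse step exposes this as an object on steps.parse.output.content, and the merge step combines it with the metadata defined in merge.json (shown further below).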
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/catalog-info.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/catalog-info.yaml
new file mode 100644
index 0000000..663b059
--- /dev/null
+++ b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/catalog-info.yaml
@@ -0,0 +1,15 @@
+---
+apiVersion: backstage.io/v1alpha1
+kind: Component
+metadata:
+  name: ${{values.name | dump}}
+  annotations:
+    backstage.io/kubernetes-id: "${{values.clusterName}}"
+    apache-spark/cluster-name: "${{values.clusterName}}"
+    argo-workflows/cluster-name: "${{values.clusterName}}"
+    backstage.io/kubernetes-namespace: "${{values.namespace}}"
+    backstage.io/kubernetes-label-selector: "backstage.io/component-id=${{values.name}}"
+spec:
+  type: job
+  lifecycle: experimental
+  owner: ${{ values.owner | default('guest')}}
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/merge.json b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/merge.json
new file mode 100644
index 0000000..38fb893
--- /dev/null
+++ b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/merge.json
@@ -0,0 +1,8 @@
+{
+  "metadata": {
+    "namespace": "${{values.namespace}}",
+    "labels": {
+      "backstage.io/component-id": "${{values.name}}"
+    }
+  }
+}
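To make the merge concrete: assuming the spark-pi manifest sketched earlier and my-team entered as the namespace in the form (both made-up values), the merged metadata would come out roughly as:

    metadata:
      name: spark-pi
      namespace: my-team
      labels:
        backstage.io/component-id: spark-pi

The injected backstage.io/component-id label is exactly what the backstage.io/kubernetes-label-selector annotation in catalog-info.yaml selects on, which is how the Kubernetes-backed plugins locate the workloads belonging to this catalog entity.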
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppUI/template.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppUI/template.yaml
new file mode 100644
index 0000000..7dfdda1
--- /dev/null
+++ b/plugins/apache-spark/examples/scaffolder/SparkAppUI/template.yaml
@@ -0,0 +1,130 @@
+apiVersion: scaffolder.backstage.io/v1beta3
+kind: Template
+metadata:
+  name: apache-spark-direct-ui
+  title: Apache Spark job through the GUI
+  description: Creates an Apache Spark Application directly, without using an external scheduler
+spec:
+  owner: guest
+  type: service
+  # these are the steps which are rendered in the frontend with the form input
+  parameters:
+    - title: About this Job
+      required:
+        - name
+        - owner
+      properties:
+        name:
+          title: Application Name
+          type: string
+          description: Unique name of the component
+          ui:autofocus: true
+        owner:
+          title: Owner
+          type: string
+          description: Owner of the component
+          default: guest
+#          ui:field: OwnerPicker
+#          ui:options:
+#            catalogFilter:
+#              kind: Group
+        namespace:
+          title: Namespace
+          type: string
+          default: default
+          description: Namespace to deploy this application into. Optional. Defaults to "default".
+    - title: Specs for this job
+      required:
+        - jobType
+        - image
+        - mainFile
+        - sparkVersion
+      properties:
+        jobType:
+          type: string
+          enum:
+            - "Python"
+            - "Java"
+            - "Scala"
+            - "R"
+        image:
+          type: string
+          default: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest"
+        mainFile:
+          type: string
+          default: "local:///opt/spark/examples/src/main/python/pi.py"
+        sparkVersion:
+          type: string
+          enum:
+            - "3.1.1"
+            - "3.2.1"
+            - "3.3.1"
+            - "3.4.1"
+        driver:
+          type: object
+          properties:
+            driverCores:
+              type: integer
+              default: 1
+            driverMemory:
+              type: string
+              default: "512m"
+        executor:
+          type: object
+          properties:
+            executorCores:
+              type: integer
+              default: 1
+            executorMemory:
+              type: string
+              default: "512m"
+            executorInstances:
+              type: integer
+              default: 1
+  steps:
+    - id: create-repo
+      name: Create Repository
+      action: github:repo:create
+      input:
+        repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg
+    - id: fetch-base
+      name: Fetch Base
+      action: fetch:template
+      input:
+        url: ./templates
+        values:
+          params: ${{parameters}}
+          clusterName: 'cnoe-packaging-2'
+          name: ${{parameters.name}}
+          namespace: ${{parameters.namespace}}
+    - id: init-repo
+      name: Initialize Repository
+      action: github:repo:push
+      input:
+        repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg
+        defaultBranch: main
+    - id: deserialize
+      name: deserialize manifest
+      action: roadiehq:utils:fs:parse
+      input:
+        path: 'sparkJob.yaml'
+        parser: 'yaml'
+    - id: apply
+      name: apply manifest
+      action: cnoe:kubernetes:apply
+      input:
+        manifestObject: ${{ steps.deserialize.output.content }}
+        namespaced: true
+        clusterName: "cnoe-packaging-2"
+    - id: register
+      name: Register
+      action: catalog:register
+      input:
+        repoContentsUrl: ${{ steps['init-repo'].output.repoContentsUrl }}
+        catalogInfoPath: '/catalog-info.yaml'
+
+  output:
+    links:
+      - title: Open in catalog
+        icon: catalog
+        entityRef: ${{ steps['register'].output.entityRef }}
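Note how the fetch-base step hands the entire form payload to the templates as params: ${{parameters}}. Every collected value therefore lands under values.params, and the driver/executor objects keep the nested shape defined in the JSON schema. Assuming a user named the app my-app and accepted the defaults, picking 3.2.1 from the sparkVersion enum (all values here are illustrative), the template values would look roughly like:

    name: my-app
    namespace: default
    clusterName: cnoe-packaging-2
    params:
      name: my-app
      owner: guest
      namespace: default
      jobType: Python
      image: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest"
      mainFile: "local:///opt/spark/examples/src/main/python/pi.py"
      sparkVersion: "3.2.1"
      driver:
        driverCores: 1
        driverMemory: "512m"
      executor:
        executorCores: 1
        executorMemory: "512m"
        executorInstances: 1

This nesting is why sparkJob.yaml (below) reads values.params.driver.driverCores and the like, rather than values.params.driverCores.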
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/catalog-info.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/catalog-info.yaml
new file mode 100644
index 0000000..663b059
--- /dev/null
+++ b/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/catalog-info.yaml
@@ -0,0 +1,15 @@
+---
+apiVersion: backstage.io/v1alpha1
+kind: Component
+metadata:
+  name: ${{values.name | dump}}
+  annotations:
+    backstage.io/kubernetes-id: "${{values.clusterName}}"
+    apache-spark/cluster-name: "${{values.clusterName}}"
+    argo-workflows/cluster-name: "${{values.clusterName}}"
+    backstage.io/kubernetes-namespace: "${{values.namespace}}"
+    backstage.io/kubernetes-label-selector: "backstage.io/component-id=${{values.name}}"
+spec:
+  type: job
+  lifecycle: experimental
+  owner: ${{ values.owner | default('guest')}}
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/sparkJob.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/sparkJob.yaml
new file mode 100644
index 0000000..f47bffc
--- /dev/null
+++ b/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/sparkJob.yaml
@@ -0,0 +1,40 @@
+---
+apiVersion: "sparkoperator.k8s.io/v1beta2"
+kind: SparkApplication
+metadata:
+  name: "ui-${{values.name}}"
+  namespace: ${{values.namespace | dump}}
+  labels:
+    backstage.io/component-id: ${{values.name | dump}}
+spec:
+  type: Python
+  pythonVersion: "3"
+  mode: cluster
+  image: "${{values.params.image}}"
+  mainApplicationFile: "${{values.params.mainFile}}"
+  sparkVersion: "${{values.params.sparkVersion}}"
+  restartPolicy:
+    type: Never
+  volumes:
+    - name: "test-volume"
+      hostPath:
+        path: "/tmp"
+        type: Directory
+  driver:
+    cores: ${{values.params.driver.driverCores | default(1)}}
+    memory: "${{values.params.driver.driverMemory | default('512m')}}"
+    labels:
+      version: ${{values.params.sparkVersion}}
+    serviceAccount: spark
+    volumeMounts:
+      - name: "test-volume"
+        mountPath: "/tmp"
+  executor:
+    cores: ${{values.params.executor.executorCores | default(1)}}
+    instances: ${{values.params.executor.executorInstances | default(1)}}
+    memory: "${{values.params.executor.executorMemory | default('512m')}}"
+    labels:
+      version: ${{values.params.sparkVersion}}
+    volumeMounts:
+      - name: "test-volume"
+        mountPath: "/tmp"
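For reference, with the illustrative values sketched earlier, fetch:template would render sparkJob.yaml into roughly the following manifest before cnoe:kubernetes:apply submits it to the cnoe-packaging-2 cluster (a sketch; my-app is a made-up name, and the volume and version-label sections are elided):

    apiVersion: "sparkoperator.k8s.io/v1beta2"
    kind: SparkApplication
    metadata:
      name: "ui-my-app"
      namespace: "default"
      labels:
        backstage.io/component-id: "my-app"
    spec:
      type: Python
      pythonVersion: "3"
      mode: cluster
      image: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest"
      mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py"
      sparkVersion: "3.2.1"
      restartPolicy:
        type: Never
      driver:
        cores: 1
        memory: "512m"
        serviceAccount: spark
      executor:
        cores: 1
        instances: 1
        memory: "512m"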