add examples

Manabu Mccloskey 2023-07-10 17:29:30 -07:00
parent 250f7a20fb
commit f6728e183e
6 changed files with 312 additions and 0 deletions


@@ -0,0 +1,104 @@
apiVersion: scaffolder.backstage.io/v1beta3
kind: Template
metadata:
  name: apache-spark-argo-workflows
  title: Apache Spark Application with Argo Workflows
  description: Creates an Apache Spark Application with Argo Workflows
spec:
  owner: guest
  type: service
  # these are the steps which are rendered in the frontend with the form input
  parameters:
    - title: Fill in some steps
      required:
        - name
        - owner
      properties:
        name:
          title: Application Name
          type: string
          description: Unique name of the component
          ui:autofocus: true
          ui:options:
            rows: 5
        owner:
          title: Owner
          type: string
          description: Owner of the component
          ui:field: OwnerPicker
          ui:options:
            catalogFilter:
              kind: Group
        # labels:
        #   title: Labels
        #   type: object
        #   additionalProperties:
        #     type: string
        #   description: Labels to apply to the application
        #   ui:autofocus: true
        namespace:
          title: Namespace
          type: string
          default: default
          description: Namespace to deploy this application into. Optional; defaults to the "default" namespace.
          ui:options:
            rows: 5
    - title: Provide the Spark Application manifest
      required:
        - manifest
      properties:
        manifest:
          type: string
          description: 'Paste your Spark Application manifest here'
          "ui:widget": "textarea"
          "ui:options": {
            "inputType": "data",
            "rows": 15
          }
          ui:autofocus: true
          "ui:placeholder": |
            apiVersion: "sparkoperator.k8s.io/v1beta2"
            kind: SparkApplication
            metadata:
              name:
            spec:
  steps:
    - id: fetch-base
      name: Fetch Base
      action: fetch:template
      input:
        url: ./templates
        values:
          name: ${{ parameters.name }}
          namespace: ${{ parameters.namespace }}
          clusterName: 'cnoe-packaging-2'
          # labels: ${{ parameters.labels }}
          # manifest: ${{ parameters.manifest }}
    - id: write
      name: write-to-file
      action: roadiehq:utils:fs:write
      input:
        path: input.yaml
        content: ${{ parameters.manifest }}
    - id: parse
      name: deserialize
      action: roadiehq:utils:fs:parse
      input:
        path: input.yaml
        parser: yaml
    - id: merge
      name: Merge
      action: roadiehq:utils:json:merge
      input:
        path: merge.json
        content: ${{ steps.parse.output.content }}
    - id: register
      name: Register
      action: catalog:register
      input:
        catalogInfoPath: 'catalog-info.yaml'
  output:
    links:
      - title: Open in catalog
        icon: catalog
        entityRef: ${{ steps['register'].output.entityRef }}
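For reference, below is a minimal sketch of the kind of SparkApplication manifest a user might paste into the textarea above. It expands the ui:placeholder; the image and mainApplicationFile values reuse the defaults from the GUI template later in this commit, and the name spark-pi is hypothetical.

# Hypothetical example manifest (not part of this commit)
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
  name: spark-pi
spec:
  type: Python
  pythonVersion: "3"
  mode: cluster
  image: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest"
  mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py"
  sparkVersion: "3.2.1"
  restartPolicy:
    type: Never
  driver:
    cores: 1
    memory: "512m"
    serviceAccount: spark
  executor:
    cores: 1
    instances: 1
    memory: "512m"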


@@ -0,0 +1,15 @@
---
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
  name: ${{values.name | dump}}
  annotations:
    backstage.io/kubernetes-id: "${{values.clusterName}}"
    apache-spark/cluster-name: "${{values.clusterName}}"
    argo-workflows/cluster-name: "${{values.clusterName}}"
    backstage.io/kubernetes-namespace: "${{values.namespace}}"
    backstage.io/kubernetes-label-selector: "backstage.io/component-id=${{values.name}}"
spec:
  type: job
  lifecycle: experimental
  owner: ${{ values.owner | default('guest')}}
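Rendered with hypothetical inputs (name spark-pi, namespace default, no owner supplied), the skeleton above comes out roughly as follows; note that the dump filter serializes the name as a quoted JSON string:

# Hypothetical rendered output, assuming name=spark-pi and namespace=default
---
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
  name: "spark-pi"
  annotations:
    backstage.io/kubernetes-id: "cnoe-packaging-2"
    apache-spark/cluster-name: "cnoe-packaging-2"
    argo-workflows/cluster-name: "cnoe-packaging-2"
    backstage.io/kubernetes-namespace: "default"
    backstage.io/kubernetes-label-selector: "backstage.io/component-id=spark-pi"
spec:
  type: job
  lifecycle: experimental
  owner: guest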


@@ -0,0 +1,8 @@
{
  "metadata": {
    "namespace": "${{values.namespace}}",
    "labels": {
      "backstage.io/component-id": "${{values.name}}"
    }
  }
}
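The roadiehq:utils:json:merge step in the template deep-merges the deserialized user manifest into this skeleton, which is how the pasted SparkApplication picks up the target namespace and the backstage.io/component-id label that the catalog-info label selector matches on. For the hypothetical spark-pi manifest above, the merged result would look roughly like this (the user's spec block is carried through unchanged and elided here):

{
  "apiVersion": "sparkoperator.k8s.io/v1beta2",
  "kind": "SparkApplication",
  "metadata": {
    "name": "spark-pi",
    "namespace": "default",
    "labels": {
      "backstage.io/component-id": "spark-pi"
    }
  }
}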


@@ -0,0 +1,130 @@
apiVersion: scaffolder.backstage.io/v1beta3
kind: Template
metadata:
  name: apache-spark-direct-ui
  title: Apache Spark job through GUI
  description: Creates an Apache Spark Application directly without using outside schedulers
spec:
  owner: guest
  type: website
  # these are the steps which are rendered in the frontend with the form input
  parameters:
    - title: About this Job
      required:
        - name
        - owner
      properties:
        name:
          title: Application Name
          type: string
          description: Unique name of the component
          ui:autofocus: true
        owner:
          title: Owner
          type: string
          description: Owner of the component
          default: guest
          # ui:field: OwnerPicker
          # ui:options:
          #   catalogFilter:
          #     kind: Group
        namespace:
          title: Namespace
          type: string
          default: default
          description: Namespace to deploy this application into. Optional; defaults to the "default" namespace.
    - title: Specs for this job
      required:
        - jobType
        - image
        - mainFile
        - sparkVersion
      properties:
        jobType:
          type: string
          enum:
            - "Python"
            - "Java"
            - "Scala"
            - "R"
        image:
          type: string
          default: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest"
        mainFile:
          type: string
          default: "local:///opt/spark/examples/src/main/python/pi.py"
        sparkVersion:
          type: string
          enum:
            - "3.1.1"
            - "3.2.1"
            - "3.3.1"
            - "3.4.1"
        driver:
          type: object
          properties:
            driverCores:
              type: integer
              default: 1
            driverMemory:
              type: string
              default: "512m"
        executor:
          type: object
          properties:
            executorCores:
              type: integer
              default: 1
            executorMemory:
              type: string
              default: "512m"
            executorInstances:
              type: integer
              default: 1
  steps:
    - id: create-repo
      name: Create Repository
      action: github:repo:create
      input:
        repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg
    - id: fetch-base
      name: Fetch Base
      action: fetch:template
      input:
        url: ./templates
        values:
          params: ${{parameters}}
          clusterName: 'cnoe-packaging-2'
          name: ${{parameters.name}}
          namespace: ${{parameters.namespace}}
    - id: init-repo
      name: Initialize Repository
      action: github:repo:push
      input:
        repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg
        defaultBranch: main
    - id: deserialise
      name: deserialize manifest
      action: roadiehq:utils:fs:parse
      input:
        path: 'sparkJob.yaml'
        parser: 'yaml'
    - id: apply
      name: apply manifest
      action: cnoe:kubernetes:apply
      input:
        manifestObject: ${{ steps.deserialise.output.content }}
        namespaced: true
        clusterName: "cnoe-packaging-2"
    - id: register
      name: Register
      action: catalog:register
      input:
        repoContentsUrl: ${{ steps['init-repo'].output.repoContentsUrl }}
        catalogInfoPath: '/catalog-info.yaml'
  output:
    links:
      - title: Open in catalog
        icon: catalog
        entityRef: ${{ steps['register'].output.entityRef }}
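Note that the fetch-base step passes the entire ${{parameters}} object through as params, with the page-two driver and executor fields nested under their object keys; that is why the sparkJob.yaml skeleton below reads params.driver.* and params.executor.*. A hypothetical filled-in form yields roughly this payload:

# Hypothetical params payload (values chosen for illustration)
name: spark-pi
owner: guest
namespace: default
jobType: Python
image: public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest
mainFile: local:///opt/spark/examples/src/main/python/pi.py
sparkVersion: "3.2.1"
driver:
  driverCores: 1
  driverMemory: "512m"
executor:
  executorCores: 1
  executorMemory: "512m"
  executorInstances: 1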


@@ -0,0 +1,15 @@
---
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
  name: ${{values.name | dump}}
  annotations:
    backstage.io/kubernetes-id: "${{values.clusterName}}"
    apache-spark/cluster-name: "${{values.clusterName}}"
    argo-workflows/cluster-name: "${{values.clusterName}}"
    backstage.io/kubernetes-namespace: "${{values.namespace}}"
    backstage.io/kubernetes-label-selector: "backstage.io/component-id=${{values.name}}"
spec:
  type: job
  lifecycle: experimental
  owner: ${{ values.owner | default('guest')}}


@@ -0,0 +1,40 @@
---
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
  name: "ui-${{values.name}}"
  namespace: ${{values.namespace | dump}}
  labels:
    backstage.io/component-id: ${{values.name | dump}}
spec:
  type: Python
  pythonVersion: "3"
  mode: cluster
  image: "${{values.params.image}}"
  mainApplicationFile: "${{values.params.mainFile}}"
  sparkVersion: "${{values.params.sparkVersion}}"
  restartPolicy:
    type: Never
  volumes:
    - name: "test-volume"
      hostPath:
        path: "/tmp"
        type: Directory
  driver:
    cores: ${{values.params.driver.driverCores | default(1)}}
    memory: "${{values.params.driver.driverMemory | default('512m')}}"
    labels:
      version: ${{values.params.sparkVersion}}
    serviceAccount: spark
    volumeMounts:
      - name: "test-volume"
        mountPath: "/tmp"
  executor:
    cores: ${{values.params.executor.executorCores | default(1)}}
    instances: ${{values.params.executor.executorInstances | default(1)}}
    memory: "${{values.params.executor.executorMemory | default('512m')}}"
    labels:
      version: ${{values.params.sparkVersion}}
    volumeMounts:
      - name: "test-volume"
        mountPath: "/tmp"
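Rendered against the hypothetical payload sketched after the template above (name spark-pi, namespace default, all defaults accepted), the skeleton yields roughly the following manifest, which the cnoe:kubernetes:apply step then submits to the cnoe-packaging-2 cluster. The test-volume definition and mounts are unchanged from the skeleton and omitted for brevity:

# Hypothetical rendered sparkJob.yaml (volumes and mounts omitted)
---
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
  name: "ui-spark-pi"
  namespace: "default"
  labels:
    backstage.io/component-id: "spark-pi"
spec:
  type: Python
  pythonVersion: "3"
  mode: cluster
  image: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest"
  mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py"
  sparkVersion: "3.2.1"
  restartPolicy:
    type: Never
  driver:
    cores: 1
    memory: "512m"
    labels:
      version: 3.2.1
    serviceAccount: spark
  executor:
    cores: 1
    instances: 1
    memory: "512m"
    labels:
      version: 3.2.1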