diff --git a/plugins/apache-spark/.eslintrc.js b/plugins/apache-spark/.eslintrc.js
deleted file mode 100644
index 998aac2..0000000
--- a/plugins/apache-spark/.eslintrc.js
+++ /dev/null
@@ -1,3 +0,0 @@
-module.exports = require('@backstage/cli/config/eslint-factory')(__dirname, {
-  extends: ['prettier'],
-});
diff --git a/plugins/apache-spark/.prettierignore b/plugins/apache-spark/.prettierignore
deleted file mode 100644
index 5498e0f..0000000
--- a/plugins/apache-spark/.prettierignore
+++ /dev/null
@@ -1,2 +0,0 @@
-build
-coverage
diff --git a/plugins/apache-spark/README.md b/plugins/apache-spark/README.md
deleted file mode 100644
index 96b35dd..0000000
--- a/plugins/apache-spark/README.md
+++ /dev/null
@@ -1,75 +0,0 @@
-# Apache Spark Plugin for Backstage
-
-Welcome to the Apache Spark plugin for Backstage!
-
-This plugin lets you display information about your Apache Spark applications running in Kubernetes directly in Backstage.
-
-## Getting started
-
-![GIF](doc/images/demo1.gif)
-
-### Configuration
-
-[The Kubernetes plugin](https://backstage.io/docs/features/kubernetes/) must also be installed and enabled.
-
-Entities must be annotated with Kubernetes annotations. For example:
-```yaml
-apiVersion: backstage.io/v1alpha1
-kind: Component
-metadata:
-  name: backstage
-  annotations:
-    backstage.io/kubernetes-namespace: default
-    backstage.io/kubernetes-label-selector: env=dev,my=label
-```
-
-Update your Entity page. For example (a minimal sketch; adapt the route to your own layout):
-```typescript
-// in packages/app/src/components/catalog/EntityPage.tsx
-import { ApacheSparkPage } from '@internal/plugin-apache-spark';
-
-// add a route to the entity layout:
-<EntityLayout.Route path="/apache-spark" title="Apache Spark">
-  <ApacheSparkPage />
-</EntityLayout.Route>
-```
-
-#### Annotations
-- `backstage.io/kubernetes-namespace`: Optional. Defaults to the `default` namespace.
-- `backstage.io/kubernetes-label-selector`: Conditionally required. One of the label selectors must be defined.
-- `apache-spark/label-selector`: Conditionally required. One of the label selectors must be defined. This value takes precedence over the one above.
-- `apache-spark/cluster-name`: Optional. Specifies the name of the Kubernetes cluster to retrieve information from.
-
-### Authentication
-
-This plugin uses the Kubernetes plugin for authentication.
-
-#### Using configured Kubernetes API
-
-The plugin uses configured Kubernetes clusters to fetch resources.
-
-For example, given this Kubernetes cluster in your `app-config.yaml`:
-
-```yaml
-kubernetes:
-  serviceLocatorMethod:
-    type: "multiTenant"
-  clusterLocatorMethods:
-    - type: "config"
-      clusters:
-        - url: https://abcd.gr7.us-west-2.eks.amazonaws.com:443
-          name: my-cluster-1
-          authProvider: "serviceAccount"
-          serviceAccountToken: eyJh
-          caData: LS0t
-```
-
-For this configuration, the `apache-spark/cluster-name` annotation value must be `my-cluster-1`. If it is not specified, the first cluster in the list is selected.
-
-```yaml
-apiVersion: backstage.io/v1alpha1
-kind: Component
-metadata:
-  name: backstage
-  annotations:
-    backstage.io/kubernetes-namespace: default
-    backstage.io/kubernetes-label-selector: env=dev,my=label
-    apache-spark/cluster-name: my-cluster-1
-```
diff --git a/plugins/apache-spark/dev/index.tsx b/plugins/apache-spark/dev/index.tsx
deleted file mode 100644
index 5f2b474..0000000
--- a/plugins/apache-spark/dev/index.tsx
+++ /dev/null
@@ -1,12 +0,0 @@
-import React from 'react';
-import { createDevApp } from '@backstage/dev-utils';
-import { apacheSparkPlugin, ApacheSparkPage } from '../src/plugin';
-
-createDevApp()
-  .registerPlugin(apacheSparkPlugin)
-  .addPage({
-    element: <ApacheSparkPage />,
-    title: 'Root Page',
-    path: '/apache-spark'
-  })
-  .render();
diff --git a/plugins/apache-spark/doc/images/demo1.gif b/plugins/apache-spark/doc/images/demo1.gif
deleted file mode 100644
index 614e3ec..0000000
Binary files a/plugins/apache-spark/doc/images/demo1.gif and /dev/null differ
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml
deleted file mode 100644
index defbcc8..0000000
--- a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml
+++ /dev/null
@@ -1,130 +0,0 @@
-apiVersion: scaffolder.backstage.io/v1beta3
-kind: Template
-metadata:
-  name: apache-spark-argo-workflows
-  title: Apache Spark Application with Argo Workflows
-  description: Creates an Apache Spark Application with Argo Workflows
-spec:
-  owner: guest
-  type: job
-  # these are the steps which are rendered in the frontend with the form input
-  parameters:
-    - title: Fill in some steps
-      required:
-        - name
-        - owner
-      properties:
-        name:
-          title: Application Name
-          type: string
-          description: Unique name of the component
-          ui:autofocus: true
-          ui:options:
-            rows: 5
-        owner:
-          title: Owner
-          type: string
-          description: Owner of the component
-          ui:field: OwnerPicker
-          ui:options:
-            catalogFilter:
-              kind: Group
-        namespace:
-          title: Namespace
-          type: string
-          default: default
-          description: Namespace to deploy this application into. Optional. Defaults to the `default` namespace.
-          ui:options:
-            rows: 5
-    - title: Paste your Spark manifest
-      required:
-        - manifest
-      properties:
-        manifest:
-          type: string
-          description: 'Paste your Spark Application manifest here'
-          "ui:widget": "textarea"
-          "ui:options": {
-            "inputType": "data",
-            "rows": 15
-          }
-          ui:autofocus: true
-          "ui:placeholder": |
-            apiVersion: "sparkoperator.k8s.io/v1beta2"
-            kind: SparkApplication
-            metadata:
-              name:
-            spec:
-  steps:
-    - id: fetch-base
-      name: Fetch Base
-      action: fetch:template
-      input:
-        url: ./templates/SparkManifest
-        values:
-          name: ${{ parameters.name }}
-          namespace: ${{ parameters.namespace }}
-          clusterName: 'cnoe-packaging'
-    - id: write
-      name: write-to-file
-      action: roadiehq:utils:fs:write
-      input:
-        path: input.yaml
-        content: ${{ parameters.manifest }}
-    - id: parse
-      name: deserialize
-      action: roadiehq:utils:fs:parse
-      input:
-        path: input.yaml
-        parser: yaml
-    - id: merge
-      name: Merge
-      action: roadiehq:utils:json:merge
-      input:
-        path: merge.json
-        content: ${{ steps.parse.output.content }}
-    - id: deserialise
-      name: deserialize manifest
-      action: roadiehq:utils:fs:parse
-      input:
-        path: 'merge.json'
-        parser: 'json'
-    - id: fetch-argo-manifest
-      name: Fetch Argo Workflows Manifest
-      action: fetch:template
-      input:
-        url: ./templates/ArgoWorkflowsManifest
-        values:
-          name: ${{ parameters.name }}
-          namespace: ${{ parameters.namespace }}
-          clusterName: 'cnoe-packaging'
-          manifest: ${{ steps.deserialise.output.content }}
-    - id: apply
-      name: apply manifest
-      action: cnoe:kubernetes:apply
-      input:
-        manifestPath: manifest.yaml
-        namespaced: true
-        clusterName: "cnoe-packaging"
-    - id: create-repo
-      name: Create Repository
-      action: github:repo:create
-      input:
-        repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg
-    - id: init-repo
-      name: Initialize Repository
-      action: github:repo:push
-      input:
-        repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg
-        defaultBranch: main
-    - id: register
-      name: Register
-      action: catalog:register
-      input:
-        catalogInfoPath: '/catalog-info.yaml'
-        repoContentsUrl: ${{ steps['init-repo'].output.repoContentsUrl }}
-  output:
-    links:
-      - title: Open in catalog
-        icon: catalog
-        entityRef: ${{ steps['register'].output.entityRef }}
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/catalog-info.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/catalog-info.yaml
deleted file mode 100644
index 663b059..0000000
--- a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/catalog-info.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-apiVersion: backstage.io/v1alpha1
-kind: Component
-metadata:
-  name: ${{values.name | dump}}
-  annotations:
-    backstage.io/kubernetes-id: "${{values.clusterName}}"
-    apache-spark/cluster-name: "${{values.clusterName}}"
-    argo-workflows/cluster-name: "${{values.clusterName}}"
-    backstage.io/kubernetes-namespace: "${{values.namespace}}"
-    backstage.io/kubernetes-label-selector: "backstage.io/component-id=${{values.name}}"
-spec:
-  type: job
-  lifecycle: experimental
-  owner: ${{ values.owner | default('guest')}}
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/manifest.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/manifest.yaml
deleted file mode 100644
index d5de4b0..0000000
--- a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/manifest.yaml
+++ /dev/null
@@ -1,43 +0,0 @@
-apiVersion: argoproj.io/v1alpha1
-kind: Workflow
-metadata:
-  name: "spark-${{values.name}}"
-  namespace: "${{values.namespace}}"
-  labels:
-    backstage.io/component-id: "${{values.name}}"
-spec:
-  arguments: {}
-  entrypoint: demo-workflow
-  serviceAccountName: argo-workflows
-  workflowMetadata:
-    labels:
-      backstage.io/component-id: "${{values.name}}"
-  templates:
-    - name: demo-workflow
-      steps:
-        - - name: prepare-resources
-            template: prepare-resources
-        - - name: run-sparkapp
-            template: run-sparkapp
-        - - name: cleanup-resources
-            template: cleanup-resources
-          - name: notify-users
-            template: cleanup-resources
-    - name: prepare-resources
-      container:
-        image: docker/whalesay
-        command: [ sleep ]
-        args: [ "10" ]
-    - name: run-sparkapp
-      resource:
-        action: create
-        setOwnerReference: true
-        successCondition: status.applicationState.state == COMPLETED
-        failureCondition: status.applicationState.state in (FAILED, ERROR)
-        manifest: |
-          ${{values.manifest | dump}}
-    - name: cleanup-resources
-      container:
-        image: docker/whalesay
-        command: [ sleep ]
-        args: [ "5" ]
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/SparkManifest/merge.json b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/SparkManifest/merge.json
deleted file mode 100644
index 38fb893..0000000
--- a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/SparkManifest/merge.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "metadata": {
-    "namespace": "${{values.namespace}}",
-    "labels": {
-      "backstage.io/component-id": "${{values.name}}"
-    }
-  }
-}
diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppUI/template.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppUI/template.yaml
deleted file mode 100644
index 6699324..0000000
--- a/plugins/apache-spark/examples/scaffolder/SparkAppUI/template.yaml
+++ /dev/null
@@ -1,130 +0,0 @@
-apiVersion: scaffolder.backstage.io/v1beta3
-kind: Template
-metadata:
-  name: apache-spark-direct-ui
-  title: Apache Spark job through GUI
-  description: Creates an Apache Spark Application directly, without using external schedulers
-spec:
-  owner: guest
-  type: job
-  # these are the steps which are rendered in the frontend with the form input
-  parameters:
-    - title: About this Job
-      required:
-        - name
-        - owner
-      properties:
-        name:
-          title: Application Name
-          type: string
-          description: Unique name of the component
-          ui:autofocus: true
-        owner:
-          title: Owner
-          type: string
-          description: Owner of the component
-          default: guest
-#          ui:field: OwnerPicker
-#          ui:options:
-#            catalogFilter:
-#              kind: Group
-        namespace:
-          title: Namespace
-          type: string
-          default: default
-          description: Namespace to deploy this application into. Optional. Defaults to the `default` namespace.
- - title: Specs for this job - required: - - jobType - - image - - mainFile - - sparkVersion - properties: - jobType: - type: string - enum: - - "Python" - - "Java" - - "Scala" - - "R" - image: - type: string - default: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest" - mainFile: - type: string - default: "local:///opt/spark/examples/src/main/python/pi.py" - sparkVersion: - type: string - enum: - - "3.1.1" - - "3.2.1" - - "3.3.1" - - "3.4.1" - driver: - type: object - properties: - driverCores: - type: integer - default: 1 - driverMemory: - type: string - default: "512m" - executor: - type: object - properties: - executorCores: - type: integer - default: 1 - executorMemory: - type: string - default: "512m" - executorInstances: - type: integer - default: 1 - steps: - - id: create-repo - name: Create Repository - action: github:repo:create - input: - repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg - - id: fetch-base - name: Fetch Base - action: fetch:template - input: - url: ./templates - values: - params: ${{parameters}} - clusterName: 'cnoe-packaging' - name: ${{parameters.name}} - namespace: ${{parameters.namespace}} - - id: init-repo - name: Initialize Repository - action: github:repo:push - input: - repoUrl: github.com?repo=spark-ui-${{parameters.name}}&owner=manabuOrg - defaultBranch: main - - id: deserialise - name: deserialize manifest - action: roadiehq:utils:fs:parse - input: - path: 'sparkJob.yaml' - parser: 'yaml' - - id: apply - name: apply manifest - action: cnoe:kubernetes:apply - input: - manifestObject: ${{ steps.deserialise.output.content }} - namespaced: true - clusterName: "cnoe-packaging" - - id: register - name: Register - action: catalog:register - input: - repoContentsUrl: ${{ steps['init-repo'].output.repoContentsUrl }} - catalogInfoPath: '/catalog-info.yaml' - - output: - links: - - title: Open in catalog - icon: catalog - entityRef: ${{ steps['register'].output.entityRef }} diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/catalog-info.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/catalog-info.yaml deleted file mode 100644 index 663b059..0000000 --- a/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/catalog-info.yaml +++ /dev/null @@ -1,15 +0,0 @@ ---- -apiVersion: backstage.io/v1alpha1 -kind: Component -metadata: - name: ${{values.name | dump}} - annotations: - backstage.io/kubernetes-id: "${{values.clusterName}}" - apache-spark/cluster-name: "${{values.clusterName}}" - argo-workflows/cluster-name: "${{values.clusterName}}" - backstage.io/kubernetes-namespace: "${{values.namespace}}" - backstage.io/kubernetes-label-selector: "backstage.io/component-id=${{values.name}}" -spec: - type: job - lifecycle: experimental - owner: ${{ values.owner | default('guest')}} diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/sparkJob.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/sparkJob.yaml deleted file mode 100644 index f47bffc..0000000 --- a/plugins/apache-spark/examples/scaffolder/SparkAppUI/templates/sparkJob.yaml +++ /dev/null @@ -1,40 +0,0 @@ ---- -apiVersion: "sparkoperator.k8s.io/v1beta2" -kind: SparkApplication -metadata: - name: "ui-${{values.name}}" - namespace: ${{values.namespace | dump}} - labels: - backstage.io/component-id: ${{values.name | dump}} -spec: - type: Python - pythonVersion: "3" - mode: cluster - image: "${{values.params.image}}" - mainApplicationFile: 
"${{values.params.mainFile}}" - sparkVersion: "${{values.params.sparkVersion}}" - restartPolicy: - type: Never - volumes: - - name: "test-volume" - hostPath: - path: "/tmp" - type: Directory - driver: - cores: ${{values.params.driverCores | default(1)}} - memory: "${{values.params.driverMemory | default("512m")}}" - labels: - version: ${{values.params.sparkVersion}} - serviceAccount: spark - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" - executor: - cores: ${{values.params.executorCores | default(1)}} - instances: ${{values.params.executorInstances | default(1)}} - memory: ${{values.params.executorMemory | default("512m")}} - labels: - version: ${{values.params.sparkVersion}} - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" diff --git a/plugins/apache-spark/package.json b/plugins/apache-spark/package.json deleted file mode 100644 index 73f1a9e..0000000 --- a/plugins/apache-spark/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "name": "@internal/plugin-apache-spark", - "version": "0.1.0", - "main": "src/index.ts", - "types": "src/index.ts", - "license": "Apache-2.0", - "private": true, - "publishConfig": { - "access": "public", - "main": "dist/index.esm.js", - "types": "dist/index.d.ts" - }, - "backstage": { - "role": "frontend-plugin" - }, - "scripts": { - "start": "backstage-cli package start", - "build": "backstage-cli package build", - "lint": "backstage-cli package lint", - "test": "backstage-cli package test --watch false", - "clean": "backstage-cli package clean", - "prepack": "backstage-cli package prepack", - "postpack": "backstage-cli package postpack" - }, - "dependencies": { - "@backstage/catalog-model": "^1.4.1", - "@backstage/core-components": "^0.13.1", - "@backstage/core-plugin-api": "^1.5.1", - "@backstage/plugin-catalog-react": "^1.7.0", - "@backstage/plugin-kubernetes": "^0.9.2", - "@backstage/theme": "^0.3.0", - "@material-ui/core": "^4.12.2", - "@material-ui/icons": "^4.9.1", - "@material-ui/lab": "4.0.0-alpha.61", - "react": "^17.0.0", - "react-dom": "^16.13.1 || ^17.0.0", - "react-router-dom": "6.0.0-beta.0 || ^6.3.0", - "react-use": "^17.2.4", - "typescript": "^3.7.5 || ^4.0.0 || ^5.0.0", - "yaml": "^2.3.1" - }, - "peerDependencies": { - "react": "^16.13.1 || ^17.0.0", - "react-router-dom": "^6.14.1" - }, - "devDependencies": { - "@backstage/cli": "^0.22.7", - "@backstage/core-app-api": "^1.8.0", - "@backstage/dev-utils": "^1.0.15", - "@backstage/test-utils": "^1.3.1", - "@testing-library/dom": ">=7.21.4", - "@testing-library/jest-dom": "^5.10.1", - "@testing-library/react": "^12.1.3", - "@testing-library/user-event": "^14.0.0", - "@types/node": "*", - "cross-fetch": "^3.1.5", - "msw": "^1.0.0" - }, - "files": [ - "dist" - ] -} diff --git a/plugins/apache-spark/pi-argo-workflows.yaml b/plugins/apache-spark/pi-argo-workflows.yaml deleted file mode 100644 index 361b46f..0000000 --- a/plugins/apache-spark/pi-argo-workflows.yaml +++ /dev/null @@ -1,60 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - name: spark-operator - namespace: default -spec: - arguments: {} - entrypoint: demo-workflow - serviceAccountName: argo-workflows - templates: - - name: demo-workflow - steps: - - - name: sleep - template: sleep - - - name: spark-operator - template: sparkapp - - name: sleep - container: - image: docker/whalesay - command: [ sleep ] - args: [ "60" ] - - name: sparkapp - resource: - action: create - setOwnerReference: true - successCondition: status.applicationState.state == COMPLETED - failureCondition: 
status.applicationState.state in (FAILED, ERROR) - manifest: | - apiVersion: "sparkoperator.k8s.io/v1beta2" - kind: SparkApplication - metadata: - generateName: pyspark-pi- - namespace: default - spec: - type: Python - pythonVersion: "3" - mode: cluster - image: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest" - mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py" - sparkVersion: "3.1.1" - restartPolicy: - type: OnFailure - onFailureRetries: 1 - onFailureRetryInterval: 10 - onSubmissionFailureRetries: 1 - onSubmissionFailureRetryInterval: 20 - driver: - cores: 1 - coreLimit: "1200m" - memory: "512m" - labels: - version: 3.1.1 - serviceAccount: spark - executor: - cores: 1 - instances: 2 - memory: "512m" - serviceAccount: spark - labels: - version: 3.1.1 diff --git a/plugins/apache-spark/pi-success.yaml b/plugins/apache-spark/pi-success.yaml deleted file mode 100644 index 377afc1..0000000 --- a/plugins/apache-spark/pi-success.yaml +++ /dev/null @@ -1,39 +0,0 @@ -apiVersion: "sparkoperator.k8s.io/v1beta2" -kind: SparkApplication -metadata: - generateName: spark-pi- - namespace: default -spec: - type: Python - pythonVersion: "3" - mode: cluster - image: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest" - mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py" - sparkVersion: "3.1.1" - restartPolicy: - type: Never - volumes: - - name: "test-volume" - hostPath: - path: "/tmp" - type: Directory - driver: - cores: 1 - coreLimit: "1200m" - memory: "512m" - labels: - version: 3.1.1 - serviceAccount: spark - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" - executor: - cores: 1 - instances: 1 - memory: "512m" - labels: - version: 3.1.1 - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" - diff --git a/plugins/apache-spark/pi.yaml b/plugins/apache-spark/pi.yaml deleted file mode 100644 index ca881ab..0000000 --- a/plugins/apache-spark/pi.yaml +++ /dev/null @@ -1,41 +0,0 @@ - -apiVersion: "sparkoperator.k8s.io/v1beta2" -kind: SparkApplication -metadata: -# name: spark-pi - generateName: spark-pi- - namespace: default -spec: - type: Python - pythonVersion: "3" - mode: cluster - image: "public.ecr.aws/m8u6z8z4/manabu-test:test-spark" - mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py" - sparkVersion: "3.1.1" - restartPolicy: - type: Never - volumes: - - name: "test-volume" - hostPath: - path: "/tmp" - type: Directory - driver: - cores: 1 - coreLimit: "1200m" - memory: "512m" - labels: - version: 3.1.1 - serviceAccount: spark - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" - executor: - cores: 1 - instances: 1 - memory: "512m" - labels: - version: 3.1.1 - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" - diff --git a/plugins/apache-spark/rbac-argo-workflows.yaml b/plugins/apache-spark/rbac-argo-workflows.yaml deleted file mode 100644 index fd4e4d3..0000000 --- a/plugins/apache-spark/rbac-argo-workflows.yaml +++ /dev/null @@ -1,29 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: argo-workflows - namespace: default ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - namespace: default - name: argo-workflows-spark-full-control -rules: - - apiGroups: ["sparkoperator.k8s.io"] - resources: ["*"] - verbs: ["*"] ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: argo-workflows-spark - namespace: default -subjects: - - kind: ServiceAccount - name: 
argo-workflows - namespace: default -roleRef: - kind: Role - name: argo-workflows-spark-full-control - apiGroup: rbac.authorization.k8s.io diff --git a/plugins/apache-spark/rbac.yaml b/plugins/apache-spark/rbac.yaml deleted file mode 100644 index 89ea433..0000000 --- a/plugins/apache-spark/rbac.yaml +++ /dev/null @@ -1,35 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: spark - namespace: default ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - namespace: default - name: spark-role -rules: -- apiGroups: [""] - resources: ["pods"] - verbs: ["*"] -- apiGroups: [""] - resources: ["services"] - verbs: ["*"] -- apiGroups: [""] - resources: ["configmaps"] - verbs: ["*"] ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: spark-role-binding - namespace: default -subjects: -- kind: ServiceAccount - name: spark - namespace: default -roleRef: - kind: Role - name: spark-role - apiGroup: rbac.authorization.k8s.io diff --git a/plugins/apache-spark/src/api/index.test.ts b/plugins/apache-spark/src/api/index.test.ts deleted file mode 100644 index 8d0180c..0000000 --- a/plugins/apache-spark/src/api/index.test.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { ApacheSparkClient } from './index'; -import { ApacheSpark } from './model'; - -const mockKubernetesApi = { - proxy: jest.fn(), - getClusters: jest.fn(), - getObjectsByEntity: jest.fn(), - getWorkloadsByEntity: jest.fn(), - getCustomObjectsByEntity: jest.fn(), -}; - -describe('ApacheSparkClient', () => { - let apacheSparkClient: ApacheSparkClient; - - beforeEach(() => { - apacheSparkClient = new ApacheSparkClient(mockKubernetesApi); - }); - - afterEach(() => { - jest.clearAllMocks(); - }); - - it('should fetch Spark application logs', async () => { - mockKubernetesApi.proxy.mockResolvedValue({ - ok: true, - text: () => { - return 'logs'; - }, - }); - const logs = await apacheSparkClient.getLogs( - 'cluster1', - 'spark-namespace', - 'spark-pod-name', - 'abc', - ); - expect(logs).toEqual('logs'); - expect(mockKubernetesApi.proxy).toHaveBeenCalledWith({ - clusterName: 'cluster1', - path: '/api/v1/namespaces/spark-namespace/pods/spark-pod-name/log?tailLines=1000&container=abc', - }); - }); - - it('should throw error if Spark application logs are not fetched', async () => { - mockKubernetesApi.proxy.mockResolvedValueOnce({ - status: 500, - statusText: 'Internal Server Error', - ok: false, - text: () => { - return 'oh noes'; - }, - }); - - await expect( - apacheSparkClient.getLogs( - 'spark-app-name', - 'spark-namespace', - 'spark-pod-name', - 'abc', - ), - ).rejects.toEqual( - 'failed to fetch logs: 500, Internal Server Error, oh noes', - ); - }); - - // test getSparkApp method - it('should fetch Spark application', async () => { - // @ts-ignore - const mockResponse: ApacheSpark = { - apiVersion: 'sparkoperator.k8s.io/v1beta2', - kind: 'SparkApplication', - metadata: { - name: 'spark-app-name', - namespace: 'spark-namespace', - labels: { - app: 'spark-app-name', - }, - creationTimestamp: '2021-01-01T00:00:00Z', - }, - spec: { - image: 'abc', - mainApplicationFile: 'main.py', - mode: 'cluster', - sparkVersion: 'v3.1.1.', - type: 'Python', - driver: { - cores: 1, - }, - executor: { - cores: 1, - }, - }, - status: { - applicationState: { - state: 'RUNNING', - }, - }, - }; - - mockKubernetesApi.proxy.mockResolvedValue({ - ok: true, - text: () => { - return JSON.stringify(mockResponse); - }, - }); - - const application = await apacheSparkClient.getSparkApp( - 'spark-app-name', - 'spark-namespace', 
-      'abc',
-    );
-    expect(application).toEqual(mockResponse);
-  });
});
diff --git a/plugins/apache-spark/src/api/index.ts b/plugins/apache-spark/src/api/index.ts
deleted file mode 100644
index cda9454..0000000
--- a/plugins/apache-spark/src/api/index.ts
+++ /dev/null
@@ -1,176 +0,0 @@
-import { createApiRef } from '@backstage/core-plugin-api';
-import { ApacheSpark, ApacheSparkList, Pod } from './model';
-import { KubernetesApi } from '@backstage/plugin-kubernetes';
-
-export const apacheSparkApiRef = createApiRef<ApacheSparkApi>({
-  id: 'plugin.apachespark',
-});
-
-const API_VERSION = 'sparkoperator.k8s.io/v1beta2';
-const SPARK_APP_PLURAL = 'sparkapplications';
-const K8s_API_TIMEOUT = 'timeoutSeconds';
-
-export interface ApacheSparkApi {
-  getSparkApps(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    labels: string | undefined,
-  ): Promise<ApacheSparkList>;
-
-  getSparkApp(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    name: string,
-  ): Promise<ApacheSpark>;
-
-  getLogs(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    podName: string,
-    containerName?: string | undefined,
-    tailLine?: number,
-  ): Promise<string>;
-
-  getContainers(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    podName: string,
-  ): Promise<string[]>;
-}
-
-export class ApacheSparkClient implements ApacheSparkApi {
-  private kubernetesApi: KubernetesApi;
-  constructor(kubernetesApi: KubernetesApi) {
-    this.kubernetesApi = kubernetesApi;
-  }
-  async getSparkApps(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    labels: string | undefined,
-  ): Promise<ApacheSparkList> {
-    const ns = namespace !== undefined ? namespace : 'default';
-    const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}`;
-    const query = new URLSearchParams({
-      [K8s_API_TIMEOUT]: '30',
-    });
-    if (labels) {
-      query.set('labelSelector', labels);
-    }
-    const resp = await this.kubernetesApi.proxy({
-      clusterName:
-        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
-      path: `${path}?${query.toString()}`,
-    });
-
-    if (!resp.ok) {
-      return Promise.reject(
-        `failed to fetch resources: ${resp.status}, ${
-          resp.statusText
-        }, ${await resp.text()}`,
-      );
-    }
-    const out = JSON.parse(await resp.text());
-    this.removeManagedField(out);
-    return out;
-  }
-
-  async getSparkApp(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    name: string,
-  ): Promise<ApacheSpark> {
-    const ns = namespace !== undefined ? namespace : 'default';
-    const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}/${name}`;
-    const resp = await this.kubernetesApi.proxy({
-      clusterName:
-        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
-      path: `${path}`,
-    });
-    if (!resp.ok) {
-      return Promise.reject(
-        `failed to fetch resources: ${resp.status}, ${
-          resp.statusText
-        }, ${await resp.text()}`,
-      );
-    }
-    const out = JSON.parse(await resp.text());
-    this.removeManagedField(out);
-    return out;
-  }
-
-  async getLogs(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    podName: string,
-    containerName: string | undefined,
-    tailLine: number = 1000,
-  ): Promise<string> {
-    const ns = namespace !== undefined ? namespace : 'default';
-    const path = `/api/v1/namespaces/${ns}/pods/${podName}/log`;
-    const query = new URLSearchParams({
-      tailLines: tailLine.toString(),
-    });
-    if (containerName) {
-      query.set('container', containerName);
-    }
-
-    const resp = await this.kubernetesApi.proxy({
-      clusterName:
-        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
-      path: `${path}?${query.toString()}`,
-    });
-    if (!resp.ok) {
-      return Promise.reject(
-        `failed to fetch logs: ${resp.status}, ${
-          resp.statusText
-        }, ${await resp.text()}`,
-      );
-    }
-    return resp.text();
-  }
-
-  async getContainers(
-    clusterName: string | undefined,
-    namespace: string | undefined,
-    podName: string,
-  ): Promise<string[]> {
-    const ns = namespace !== undefined ? namespace : 'default';
-    const path = `/api/v1/namespaces/${ns}/pods/${podName}`;
-    const query = new URLSearchParams({
-      [K8s_API_TIMEOUT]: '30',
-    });
-    const resp = await this.kubernetesApi.proxy({
-      clusterName:
-        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
-      path: `${path}?${query.toString()}`,
-    });
-    if (!resp.ok) {
-      throw new Error(
-        `failed to fetch logs: ${resp.status}, ${
-          resp.statusText
-        }, ${await resp.text()}`,
-      );
-    }
-    const pod = JSON.parse(await resp.text()) as Pod;
-    return pod.spec.containers.map(c => c.name);
-  }
-
-  async getFirstCluster(): Promise<string> {
-    const clusters = await this.kubernetesApi.getClusters();
-    if (clusters.length > 0) {
-      return Promise.resolve(clusters[0].name);
-    }
-    return Promise.reject('no clusters found in configuration');
-  }
-
-  removeManagedField(spark: any) {
-    if (spark.metadata?.hasOwnProperty('managedFields')) {
-      delete spark.metadata.managedFields;
-    }
-    if (spark.items) {
-      for (const i of spark.items) {
-        this.removeManagedField(i);
-      }
-    }
-  }
-}
diff --git a/plugins/apache-spark/src/api/model.ts b/plugins/apache-spark/src/api/model.ts
deleted file mode 100644
index 1d6455c..0000000
--- a/plugins/apache-spark/src/api/model.ts
+++ /dev/null
@@ -1,100 +0,0 @@
-export type Metadata = {
-  name: string;
-  namespace?: string;
-  labels?: Record<string, string>;
-  annotations?: Record<string, string>;
-  creationTimestamp: string;
-  managedFields?: any;
-};
-
-export type Spec = {
-  arguments?: string[];
-  batchScheduler?: string;
-  driver: {
-    coreLimit?: string;
-    coreRequest?: string;
-    cores?: number;
-    gpu?: {
-      name: string;
-      quantity: number;
-    };
-    labels?: Record<string, string>;
-    memory?: string;
-    memoryOverhead?: string;
-    podName?: string;
-    schedulerName?: string;
-    serviceAccount?: string;
-  };
-  executor: {
-    coreLimit?: string;
-    coreRequest?: string;
-    cores?: number;
-    gpu?: {
-      name: string;
-      quantity: number;
-    };
-    instances?: number;
-    labels?: Record<string, string>;
-    memory?: string;
-    memoryOverhead?: string;
-    schedulerName?: string;
-    serviceAccount?: string;
-  };
-  image: string;
-  mainClass?: string;
-  mainApplicationFile?: string;
-  mode: string;
-  pythonVersion?: string;
-  sparkVersion: string;
-  type: string;
-};
-
-export type Status = {
-  applicationState: {
-    errorMessage?: string;
-    state: string;
-  };
-  driverInfo?: {
-    podName: string;
-    webUIAddress: string;
-    webUIIngressAddress: string;
-    webUIIngressName: string;
-    webUIPort: string;
-    webUIServiceName: string;
-  };
-  executionAttempts?: number;
-  executorState?: { [key: string]: string };
-  lastSubmissionAttemptTime?: string;
-  sparkApplicationId?: string;
-  submissionAttempts?: number;
-  submissionID?: string;
-  terminationTime?: string;
-};
-
-export type ApacheSpark = {
-  apiVersion: string;
-  kind: string;
-  metadata: Metadata;
-  spec: Spec;
-  status: Status;
-};
-
-export type ApacheSparkList = {
-  apiVersion: string;
-  kind: string;
-  items?: ApacheSpark[];
-};
-
-export type Pod = {
-  apiVersion: string;
-  kind: string;
-  metadata: Metadata;
-  spec: PodSpec;
-};
-
-export type PodSpec = {
-  containers: {
-    image: string;
-    name: string;
-  }[];
-};
diff
--git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx deleted file mode 100644 index 01d3ade..0000000 --- a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx +++ /dev/null @@ -1,83 +0,0 @@ -import React from 'react'; -import { render, screen } from '@testing-library/react'; -import { useApi } from '@backstage/core-plugin-api'; -import { useEntity } from '@backstage/plugin-catalog-react'; -import useAsync from 'react-use/lib/useAsync'; -import { ApacheSpark } from '../../api/model'; -import { ApacheSparkDriverLogs } from './ApacheSparkLogs'; -import { - APACHE_SPARK_LABEL_SELECTOR_ANNOTATION, - CLUSTER_NAME_ANNOTATION, - K8S_NAMESPACE_ANNOTATION, -} from '../../consts'; - -jest.mock('@backstage/core-plugin-api'); -jest.mock('react-use/lib/useAsync'); -jest.mock('@backstage/plugin-catalog-react'); - -jest.mock('@backstage/core-components', () => ({ - LogViewer: (props: { text: string }) => { - return
<div>{props.text}</div>;
-  },
-}));
-
-describe('ApacheSparkDriverLogs', () => {
-  const mockUseApi = useApi as jest.MockedFunction<typeof useApi>;
-  const mockUseAsync = useAsync as jest.MockedFunction<typeof useAsync>;
-  const mockUseEntity = useEntity as jest.MockedFunction<typeof useEntity>;
-  const mockGetLogs = jest.fn();
-  const mockSparkApp = {
-    status: {
-      driverInfo: {
-        podName: 'test-pod',
-      },
-    },
-  } as ApacheSpark;
-
-  beforeEach(() => {
-    mockUseApi.mockReturnValue({
-      getLogs: mockGetLogs,
-    });
-    mockUseEntity.mockReturnValue({
-      entity: {
-        apiVersion: 'version',
-        kind: 'kind',
-        metadata: {
-          name: 'name',
-          namespace: 'ns1',
-          annotations: {
-            [K8S_NAMESPACE_ANNOTATION]: 'k8s-ns',
-            [CLUSTER_NAME_ANNOTATION]: 'my-cluster',
-            [APACHE_SPARK_LABEL_SELECTOR_ANNOTATION]: 'env=test',
-          },
-        },
-      },
-    });
-  });
-
-  afterEach(() => {
-    jest.clearAllMocks();
-  });
-
-  it('should render error message if there is an error', () => {
-    mockUseAsync.mockReturnValue({
-      value: undefined,
-      loading: false,
-      error: new Error('Test error'),
-    });
-
-    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
-    expect(screen.getByText('Error: Test error')).toBeInTheDocument();
-    expect(screen.getByRole('alert')).toBeInTheDocument();
-  });
-
-  it('should render the log viewer with the fetched logs', async () => {
-    mockUseAsync.mockReturnValue({
-      value: 'test logs',
-      loading: false,
-      error: undefined,
-    });
-    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
-    expect(screen.getByText('test logs')).toBeInTheDocument();
-  });
-});
diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
deleted file mode 100644
index e892856..0000000
--- a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
+++ /dev/null
@@ -1,100 +0,0 @@
-import { useApi } from '@backstage/core-plugin-api';
-import { apacheSparkApiRef } from '../../api';
-import useAsync from 'react-use/lib/useAsync';
-import { ApacheSpark } from '../../api/model';
-import {
-  LogViewer,
-  Progress,
-  Select,
-  SelectedItems,
-  SelectItem,
-} from '@backstage/core-components';
-import Alert from '@material-ui/lab/Alert';
-import React, { useEffect, useState } from 'react';
-import { useEntity } from '@backstage/plugin-catalog-react';
-import { getAnnotationValues } from '../utils';
-
-export const ApacheSparkDriverLogs = (props: { sparkApp: ApacheSpark }) => {
-  const apiClient = useApi(apacheSparkApiRef);
-  const { entity } = useEntity();
-  const { ns, clusterName } = getAnnotationValues(entity);
-
-  const { value, loading, error } = useAsync(async (): Promise<string> => {
-    return await apiClient.getLogs(
-      clusterName,
-      ns,
-      props.sparkApp.status.driverInfo?.podName!,
-      'spark-kubernetes-driver',
-    );
-  }, [props]);
-  if (loading) {
-    return <Progress />;
-  } else if (error) {
-    return <Alert severity="error">{`${error}`}</Alert>;
-  }
-  return <LogViewer text={value ?? ''} />;
-};
-
-const ExecutorLogs = (props: { name: string }) => {
-  const apiClient = useApi(apacheSparkApiRef);
-  const { entity } = useEntity();
-  const [logs, setLogs] = useState('');
-  const { ns, clusterName } = getAnnotationValues(entity);
-
-  useEffect(() => {
-    async function getLogs() {
-      try {
-        const val = await apiClient.getLogs(
-          clusterName,
-          ns,
-          props.name,
-          'spark-kubernetes-executor',
-        );
-        setLogs(val);
-      } catch (e) {
-        if (typeof e === 'string') {
-          setLogs(e);
-        }
-      }
-    }
-    if (props.name !== '') {
-      getLogs();
-    }
-  }, [apiClient, clusterName, ns, props]);
-
-  return <LogViewer text={logs} />;
-};
-
-export const ApacheSparkExecutorLogs = (props: { sparkApp: ApacheSpark }) => {
-  const [selected, setSelected] = useState('');
-  if (props.sparkApp.status.applicationState.state !== 'RUNNING') {
-    return (
-      <Alert severity="info">
-        Executor logs are only available for Spark Applications in RUNNING state
-      </Alert>
-    );
-  }
-  const executors: SelectItem[] = [{ label: '', value: '' }];
-  for (const key in props.sparkApp.status.executorState) {
-    if (props.sparkApp.status.executorState.hasOwnProperty(key)) {
-      executors.push({ label: key, value: key });
-    }
-  }
-
-  const handleChange = (item: SelectedItems) => {
-    if (typeof item === 'string' && item !== '') {
-      setSelected(item);
-    }
-  };
-  return (
-    <>
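
Note for reviewers: `dev/index.tsx` above imports `apacheSparkPlugin` and `ApacheSparkPage` from `../src/plugin`, but this section cuts off before reaching the deletion of `src/plugin.ts` itself. For context, here is a minimal sketch of how that wiring is conventionally done in Backstage; `rootRouteRef`, the extension name, and the lazy-loaded component path are assumptions for illustration, not the actual contents of the deleted file:

```typescript
// Hypothetical sketch, NOT part of this diff. apacheSparkApiRef and
// ApacheSparkClient come from the deleted src/api/index.ts above; the
// route ref and component path are guesses based on standard wiring.
import {
  createApiFactory,
  createPlugin,
  createRoutableExtension,
  createRouteRef,
} from '@backstage/core-plugin-api';
import { kubernetesApiRef } from '@backstage/plugin-kubernetes';
import { apacheSparkApiRef, ApacheSparkClient } from './api';

export const rootRouteRef = createRouteRef({ id: 'apache-spark' });

export const apacheSparkPlugin = createPlugin({
  id: 'apache-spark',
  apis: [
    createApiFactory({
      api: apacheSparkApiRef,
      deps: { kubernetesApi: kubernetesApiRef },
      // The client only needs the Kubernetes proxy, so it is built
      // directly on the Kubernetes plugin's API surface.
      factory: ({ kubernetesApi }) => new ApacheSparkClient(kubernetesApi),
    }),
  ],
  routes: { root: rootRouteRef },
});

export const ApacheSparkPage = apacheSparkPlugin.provide(
  createRoutableExtension({
    name: 'ApacheSparkPage',
    // Assumed component location; the real path is not shown in this section.
    component: () =>
      import('./components/ApacheSparkOverviewPage').then(
        m => m.ApacheSparkOverviewPage,
      ),
    mountPoint: rootRouteRef,
  }),
);
```

Wiring the client to `kubernetesApiRef` this way is consistent with the `ApacheSparkClient` constructor above: all reads go through the Kubernetes plugin's proxy, so the Spark plugin needs no backend of its own.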