From 0d0199e0e48e7a3f2f31ebc5051b6ca2ea2821f2 Mon Sep 17 00:00:00 2001 From: Manabu Mccloskey Date: Fri, 21 Jul 2023 09:00:55 -0700 Subject: [PATCH] fix spark plugin --- plugins/apache-spark/README.md | 3 +- .../SparkAppArgoWorkflow/template.yaml | 2 +- .../ArgoWorkflowsManifest/manifest.yaml | 25 +- plugins/apache-spark/package.json | 8 +- plugins/apache-spark/pi-argo-workflows.yaml | 60 ++++ plugins/apache-spark/pi-success.yaml | 18 +- plugins/apache-spark/pi.yaml | 16 +- plugins/apache-spark/rbac-argo-workflows.yaml | 29 ++ plugins/apache-spark/src/api/index.test.ts | 113 +++++++ plugins/apache-spark/src/api/index.ts | 6 +- plugins/apache-spark/src/api/model.ts | 22 +- .../ApacheSparkLogs/ApacheSparkLogs.test.tsx | 83 ++++++ .../ApacheSparkLogs/ApacheSparkLogs.tsx | 19 +- .../ApacheSparkOverviewTable.tsx | 18 +- .../DetailedDrawer/DetailedDrawer.tsx | 3 +- .../DetailedDrawer/DrawerOverview.tsx | 4 +- .../{Overvew => Overview}/Overview.tsx | 0 .../components/{Overvew => Overview}/index.ts | 0 plugins/apache-spark/src/components/utils.ts | 2 +- plugins/apache-spark/src/consts.ts | 6 + plugins/apache-spark/src/plugin.ts | 8 +- plugins/apache-spark/test/t | 276 ------------------ plugins/apache-spark/test/t.tgz | Bin 3005 -> 0 bytes 23 files changed, 360 insertions(+), 361 deletions(-) create mode 100644 plugins/apache-spark/pi-argo-workflows.yaml create mode 100644 plugins/apache-spark/rbac-argo-workflows.yaml create mode 100644 plugins/apache-spark/src/api/index.test.ts create mode 100644 plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx rename plugins/apache-spark/src/components/{Overvew => Overview}/Overview.tsx (100%) rename plugins/apache-spark/src/components/{Overvew => Overview}/index.ts (100%) create mode 100644 plugins/apache-spark/src/consts.ts delete mode 100644 plugins/apache-spark/test/t delete mode 100644 plugins/apache-spark/test/t.tgz diff --git a/plugins/apache-spark/README.md b/plugins/apache-spark/README.md index f4bf4dc..96b35dd 100644 --- a/plugins/apache-spark/README.md +++ b/plugins/apache-spark/README.md @@ -11,10 +11,9 @@ This plugin allows you to display information related to your Apache Spark Appli ### Configuration -Entities must be annotated with Kubernetes annotations. For example: - [The Kubernetes plugin](https://backstage.io/docs/features/kubernetes/) must also be installed and enabled. +Entities must be annotated with Kubernetes annotations. 
For example: ```yaml apiVersion: backstage.io/v1alpha1 kind: Component diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml index 000d270..defbcc8 100644 --- a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml +++ b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/template.yaml @@ -121,7 +121,7 @@ spec: name: Register action: catalog:register input: - catalogInfoPath: 'catalog-info.yaml' + catalogInfoPath: '/catalog-info.yaml' repoContentsUrl: ${{ steps['init-repo'].output.repoContentsUrl }} output: links: diff --git a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/manifest.yaml b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/manifest.yaml index 8a4ed87..d5de4b0 100644 --- a/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/manifest.yaml +++ b/plugins/apache-spark/examples/scaffolder/SparkAppArgoWorkflow/templates/ArgoWorkflowsManifest/manifest.yaml @@ -1,7 +1,7 @@ apiVersion: argoproj.io/v1alpha1 kind: Workflow metadata: - generateName: "spark-${{values.name}}-" + name: "spark-${{values.name}}" namespace: "${{values.namespace}}" labels: backstage.io/component-id: "${{values.name}}" @@ -15,16 +15,20 @@ spec: templates: - name: demo-workflow steps: - - - name: sleep - template: sleep - - - name: spark-operator - template: sparkapp - - name: sleep + - - name: prepare-resources + template: prepare-resources + - - name: run-sparkapp + template: run-sparkapp + - - name: cleanup-resources + template: cleanup-resources + - name: notify-users + template: cleanup-resources + - name: prepare-resources container: image: docker/whalesay command: [ sleep ] - args: [ "60" ] - - name: sparkapp + args: [ "10" ] + - name: run-sparkapp resource: action: create setOwnerReference: true @@ -32,3 +36,8 @@ spec: failureCondition: status.applicationState.state in (FAILED, ERROR) manifest: | ${{values.manifest | dump}} + - name: cleanup-resources + container: + image: docker/whalesay + command: [ sleep ] + args: [ "5" ] diff --git a/plugins/apache-spark/package.json b/plugins/apache-spark/package.json index f39369e..73f1a9e 100644 --- a/plugins/apache-spark/package.json +++ b/plugins/apache-spark/package.json @@ -17,12 +17,13 @@ "start": "backstage-cli package start", "build": "backstage-cli package build", "lint": "backstage-cli package lint", - "test": "backstage-cli package test", + "test": "backstage-cli package test --watch false", "clean": "backstage-cli package clean", "prepack": "backstage-cli package prepack", "postpack": "backstage-cli package postpack" }, "dependencies": { + "@backstage/catalog-model": "^1.4.1", "@backstage/core-components": "^0.13.1", "@backstage/core-plugin-api": "^1.5.1", "@backstage/plugin-catalog-react": "^1.7.0", @@ -31,7 +32,11 @@ "@material-ui/core": "^4.12.2", "@material-ui/icons": "^4.9.1", "@material-ui/lab": "4.0.0-alpha.61", + "react": "^17.0.0", + "react-dom": "^16.13.1 || ^17.0.0", + "react-router-dom": "6.0.0-beta.0 || ^6.3.0", "react-use": "^17.2.4", + "typescript": "^3.7.5 || ^4.0.0 || ^5.0.0", "yaml": "^2.3.1" }, "peerDependencies": { @@ -43,6 +48,7 @@ "@backstage/core-app-api": "^1.8.0", "@backstage/dev-utils": "^1.0.15", "@backstage/test-utils": "^1.3.1", + "@testing-library/dom": ">=7.21.4", "@testing-library/jest-dom": "^5.10.1", "@testing-library/react": "^12.1.3", 
"@testing-library/user-event": "^14.0.0", diff --git a/plugins/apache-spark/pi-argo-workflows.yaml b/plugins/apache-spark/pi-argo-workflows.yaml new file mode 100644 index 0000000..361b46f --- /dev/null +++ b/plugins/apache-spark/pi-argo-workflows.yaml @@ -0,0 +1,60 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + name: spark-operator + namespace: default +spec: + arguments: {} + entrypoint: demo-workflow + serviceAccountName: argo-workflows + templates: + - name: demo-workflow + steps: + - - name: sleep + template: sleep + - - name: spark-operator + template: sparkapp + - name: sleep + container: + image: docker/whalesay + command: [ sleep ] + args: [ "60" ] + - name: sparkapp + resource: + action: create + setOwnerReference: true + successCondition: status.applicationState.state == COMPLETED + failureCondition: status.applicationState.state in (FAILED, ERROR) + manifest: | + apiVersion: "sparkoperator.k8s.io/v1beta2" + kind: SparkApplication + metadata: + generateName: pyspark-pi- + namespace: default + spec: + type: Python + pythonVersion: "3" + mode: cluster + image: "public.ecr.aws/r1l5w1y9/spark-operator:3.2.1-hadoop-3.3.1-java-11-scala-2.12-python-3.8-latest" + mainApplicationFile: "local:///opt/spark/examples/src/main/python/pi.py" + sparkVersion: "3.1.1" + restartPolicy: + type: OnFailure + onFailureRetries: 1 + onFailureRetryInterval: 10 + onSubmissionFailureRetries: 1 + onSubmissionFailureRetryInterval: 20 + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + labels: + version: 3.1.1 + serviceAccount: spark + executor: + cores: 1 + instances: 2 + memory: "512m" + serviceAccount: spark + labels: + version: 3.1.1 diff --git a/plugins/apache-spark/pi-success.yaml b/plugins/apache-spark/pi-success.yaml index 4f88afe..377afc1 100644 --- a/plugins/apache-spark/pi-success.yaml +++ b/plugins/apache-spark/pi-success.yaml @@ -1,23 +1,7 @@ -# -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - apiVersion: "sparkoperator.k8s.io/v1beta2" kind: SparkApplication metadata: -# name: spark-pi - generateName: spark-pi + generateName: spark-pi- namespace: default spec: type: Python diff --git a/plugins/apache-spark/pi.yaml b/plugins/apache-spark/pi.yaml index 28a46a3..ca881ab 100644 --- a/plugins/apache-spark/pi.yaml +++ b/plugins/apache-spark/pi.yaml @@ -1,23 +1,9 @@ -# -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
apiVersion: "sparkoperator.k8s.io/v1beta2" kind: SparkApplication metadata: # name: spark-pi - generateName: spark-pi + generateName: spark-pi- namespace: default spec: type: Python diff --git a/plugins/apache-spark/rbac-argo-workflows.yaml b/plugins/apache-spark/rbac-argo-workflows.yaml new file mode 100644 index 0000000..fd4e4d3 --- /dev/null +++ b/plugins/apache-spark/rbac-argo-workflows.yaml @@ -0,0 +1,29 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: argo-workflows + namespace: default +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + namespace: default + name: argo-workflows-spark-full-control +rules: + - apiGroups: ["sparkoperator.k8s.io"] + resources: ["*"] + verbs: ["*"] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: argo-workflows-spark + namespace: default +subjects: + - kind: ServiceAccount + name: argo-workflows + namespace: default +roleRef: + kind: Role + name: argo-workflows-spark-full-control + apiGroup: rbac.authorization.k8s.io diff --git a/plugins/apache-spark/src/api/index.test.ts b/plugins/apache-spark/src/api/index.test.ts new file mode 100644 index 0000000..8d0180c --- /dev/null +++ b/plugins/apache-spark/src/api/index.test.ts @@ -0,0 +1,113 @@ +import { ApacheSparkClient } from './index'; +import { ApacheSpark } from './model'; + +const mockKubernetesApi = { + proxy: jest.fn(), + getClusters: jest.fn(), + getObjectsByEntity: jest.fn(), + getWorkloadsByEntity: jest.fn(), + getCustomObjectsByEntity: jest.fn(), +}; + +describe('ApacheSparkClient', () => { + let apacheSparkClient: ApacheSparkClient; + + beforeEach(() => { + apacheSparkClient = new ApacheSparkClient(mockKubernetesApi); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should fetch Spark application logs', async () => { + mockKubernetesApi.proxy.mockResolvedValue({ + ok: true, + text: () => { + return 'logs'; + }, + }); + const logs = await apacheSparkClient.getLogs( + 'cluster1', + 'spark-namespace', + 'spark-pod-name', + 'abc', + ); + expect(logs).toEqual('logs'); + expect(mockKubernetesApi.proxy).toHaveBeenCalledWith({ + clusterName: 'cluster1', + path: '/api/v1/namespaces/spark-namespace/pods/spark-pod-name/log?tailLines=1000&container=abc', + }); + }); + + it('should throw error if Spark application logs are not fetched', async () => { + mockKubernetesApi.proxy.mockResolvedValueOnce({ + status: 500, + statusText: 'Internal Server Error', + ok: false, + text: () => { + return 'oh noes'; + }, + }); + + await expect( + apacheSparkClient.getLogs( + 'spark-app-name', + 'spark-namespace', + 'spark-pod-name', + 'abc', + ), + ).rejects.toEqual( + 'failed to fetch logs: 500, Internal Server Error, oh noes', + ); + }); + + // test getSparkApp method + it('should fetch Spark application', async () => { + // @ts-ignore + const mockResponse: ApacheSpark = { + apiVersion: 'sparkoperator.k8s.io/v1beta2', + kind: 'SparkApplication', + metadata: { + name: 'spark-app-name', + namespace: 'spark-namespace', + labels: { + app: 'spark-app-name', + }, + creationTimestamp: '2021-01-01T00:00:00Z', + }, + spec: { + image: 'abc', + mainApplicationFile: 'main.py', + mode: 'cluster', + sparkVersion: 'v3.1.1.', + type: 'Python', + driver: { + cores: 1, + }, + executor: { + cores: 1, + }, + }, + status: { + applicationState: { + state: 'RUNNING', + }, + }, + }; + + mockKubernetesApi.proxy.mockResolvedValue({ + ok: true, + text: () => { + return JSON.stringify(mockResponse); + }, + }); + + const application = await 
apacheSparkClient.getSparkApp(
+      'spark-app-name',
+      'spark-namespace',
+      'abc',
+    );
+    expect(application).toEqual(mockResponse);
+  });
+});
diff --git a/plugins/apache-spark/src/api/index.ts b/plugins/apache-spark/src/api/index.ts
index 16c560a..cda9454 100644
--- a/plugins/apache-spark/src/api/index.ts
+++ b/plugins/apache-spark/src/api/index.ts
@@ -46,14 +46,16 @@ export class ApacheSparkClient implements ApacheSparkApi {
   async getSparkApps(
     clusterName: string | undefined,
     namespace: string | undefined,
-    labels: string,
+    labels: string | undefined,
   ): Promise<ApacheSparkList> {
     const ns = namespace !== undefined ? namespace : 'default';
     const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}`;
     const query = new URLSearchParams({
       [K8s_API_TIMEOUT]: '30',
-      // labelSelector: labels,
     });
+    if (labels) {
+      query.set('labelSelector', labels);
+    }
     const resp = await this.kubernetesApi.proxy({
       clusterName:
         clusterName !== undefined ? clusterName : await this.getFirstCluster(),
diff --git a/plugins/apache-spark/src/api/model.ts b/plugins/apache-spark/src/api/model.ts
index e5805fe..1d6455c 100644
--- a/plugins/apache-spark/src/api/model.ts
+++ b/plugins/apache-spark/src/api/model.ts
@@ -1,8 +1,8 @@
 export type Metadata = {
   name: string;
   namespace?: string;
-  labels: Record<string, string>;
-  annotations: Record<string, string>;
+  labels?: Record<string, string>;
+  annotations?: Record<string, string>;
   creationTimestamp: string;
   managedFields?: any;
 };
@@ -51,10 +51,10 @@ export type Spec = {
 
 export type Status = {
   applicationState: {
-    errorMessage: string;
+    errorMessage?: string;
     state: string;
   };
-  driverInfo: {
+  driverInfo?: {
     podName: string;
     webUIAddress: string;
     webUIIngressAddress: string;
@@ -62,13 +62,13 @@
     webUIPort: string;
     webUIServiceName: string;
   };
-  executionAttempts: number;
-  executorState: { [key: string]: string };
-  lastSubmissionAttemptTime: string;
-  sparkApplicationId: string;
-  submissionAttempts: number;
-  submissionID: string;
-  terminationTime: string;
+  executionAttempts?: number;
+  executorState?: { [key: string]: string };
+  lastSubmissionAttemptTime?: string;
+  sparkApplicationId?: string;
+  submissionAttempts?: number;
+  submissionID?: string;
+  terminationTime?: string;
 };
 
 export type ApacheSpark = {
diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx
new file mode 100644
index 0000000..01d3ade
--- /dev/null
+++ b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx
@@ -0,0 +1,83 @@
+import React from 'react';
+import { render, screen } from '@testing-library/react';
+import { useApi } from '@backstage/core-plugin-api';
+import { useEntity } from '@backstage/plugin-catalog-react';
+import useAsync from 'react-use/lib/useAsync';
+import { ApacheSpark } from '../../api/model';
+import { ApacheSparkDriverLogs } from './ApacheSparkLogs';
+import {
+  APACHE_SPARK_LABEL_SELECTOR_ANNOTATION,
+  CLUSTER_NAME_ANNOTATION,
+  K8S_NAMESPACE_ANNOTATION,
+} from '../../consts';
+
+jest.mock('@backstage/core-plugin-api');
+jest.mock('react-use/lib/useAsync');
+jest.mock('@backstage/plugin-catalog-react');
+
+jest.mock('@backstage/core-components', () => ({
+  LogViewer: (props: { text: string }) => {
+    return <div>{props.text}</div>;
+  },
+}));
+
+describe('ApacheSparkDriverLogs', () => {
+  const mockUseApi = useApi as jest.MockedFunction<typeof useApi>;
+  const mockUseAsync = useAsync as jest.MockedFunction<typeof useAsync>;
+  const mockUseEntity = useEntity as jest.MockedFunction<typeof useEntity>;
+  const mockGetLogs = jest.fn();
+  const mockSparkApp = {
+    status: {
+      driverInfo: {
+        podName: 'test-pod',
+      },
+    },
+  } as ApacheSpark;
+
+  beforeEach(() => {
+    mockUseApi.mockReturnValue({
+      getLogs: mockGetLogs,
+    });
+    mockUseEntity.mockReturnValue({
+      entity: {
+        apiVersion: 'version',
+        kind: 'kind',
+        metadata: {
+          name: 'name',
+          namespace: 'ns1',
+          annotations: {
+            [K8S_NAMESPACE_ANNOTATION]: 'k8s-ns',
+            [CLUSTER_NAME_ANNOTATION]: 'my-cluster',
+            [APACHE_SPARK_LABEL_SELECTOR_ANNOTATION]: 'env=test',
+          },
+        },
+      },
+    });
+  });
+
+  afterEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should render error message if there is an error', () => {
+    mockUseAsync.mockReturnValue({
+      value: undefined,
+      loading: false,
+      error: new Error('Test error'),
+    });
+
+    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
+    expect(screen.getByText('Error: Test error')).toBeInTheDocument();
+    expect(screen.getByRole('alert')).toBeInTheDocument();
+  });
+
+  it('should render the log viewer with the fetched logs', async () => {
+    mockUseAsync.mockReturnValue({
+      value: 'test logs',
+      loading: false,
+      error: undefined,
+    });
+    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
+    expect(screen.getByText('test logs')).toBeInTheDocument();
+  });
+});
diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
index 57cf939..e892856 100644
--- a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
+++ b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
@@ -11,15 +11,19 @@ import {
 } from '@backstage/core-components';
 import Alert from '@material-ui/lab/Alert';
 import React, { useEffect, useState } from 'react';
+import { useEntity } from '@backstage/plugin-catalog-react';
+import { getAnnotationValues } from '../utils';
 
 export const ApacheSparkDriverLogs = (props: { sparkApp: ApacheSpark }) => {
   const apiClient = useApi(apacheSparkApiRef);
+  const { entity } = useEntity();
+  const { ns, clusterName } = getAnnotationValues(entity);
   const { value, loading, error } = useAsync(async (): Promise<string> => {
     return await apiClient.getLogs(
-      'cnoe-packaging-2',
-      'default',
-      props.sparkApp.status.driverInfo.podName,
+      clusterName,
+      ns,
+      props.sparkApp.status.driverInfo?.podName!,
       'spark-kubernetes-driver',
     );
   }, [props]);
 
@@ -33,13 +37,16 @@
 
 const ExecutorLogs = (props: { name: string }) => {
   const apiClient = useApi(apacheSparkApiRef);
+  const { entity } = useEntity();
   const [logs, setLogs] = useState('');
+  const { ns, clusterName } = getAnnotationValues(entity);
+
   useEffect(() => {
     async function getLogs() {
       try {
         const val = await apiClient.getLogs(
-          'cnoe-packaging-2',
-          'default',
+          clusterName,
+          ns,
           props.name,
           'spark-kubernetes-executor',
         );
@@ -53,7 +60,7 @@
     if (props.name !== '') {
       getLogs();
     }
-  }, [apiClient, props]);
+  }, [apiClient, clusterName, ns, props]);
 
   return <LogViewer text={logs} />;
 };
diff --git a/plugins/apache-spark/src/components/ApacheSparkOverviewTable/ApacheSparkOverviewTable.tsx b/plugins/apache-spark/src/components/ApacheSparkOverviewTable/ApacheSparkOverviewTable.tsx
index d1a3981..e54b062 100644
--- a/plugins/apache-spark/src/components/ApacheSparkOverviewTable/ApacheSparkOverviewTable.tsx
+++ b/plugins/apache-spark/src/components/ApacheSparkOverviewTable/ApacheSparkOverviewTable.tsx
@@ -7,16 +7,16 @@ import {
   Table,
   TableColumn,
 } from '@backstage/core-components';
-import { useEntity } from '@backstage/plugin-catalog-react';
 import { useApi } from '@backstage/core-plugin-api';
 import { apacheSparkApiRef } from '../../api';
 import React, { useEffect, useState } from 'react';
-import { getAnnotationValues } from '../utils';
 import useAsync from 'react-use/lib/useAsync';
 import { ApacheSpark, ApacheSparkList } from '../../api/model';
 import Alert from '@material-ui/lab/Alert';
 import { createStyles, Drawer, makeStyles, Theme } from '@material-ui/core';
 import { DrawerContent } from '../DetailedDrawer/DetailedDrawer';
+import { getAnnotationValues } from '../utils';
+import { useEntity } from '@backstage/plugin-catalog-react';
 
 type TableData = {
   id: string;
@@ -28,7 +28,7 @@ type TableData = {
   raw: ApacheSpark;
 };
 
-const columns: TableColumn[] = [
+const columns: TableColumn<TableData>[] = [
   {
     title: 'Name',
     field: 'name',
@@ -57,21 +57,17 @@ const useDrawerStyles = makeStyles((theme: Theme) =>
 );
 
 export const ApacheSparkOverviewTable = () => {
-  // const { entity } = useEntity();
   const apiClient = useApi(apacheSparkApiRef);
   const [columnData, setColumnData] = useState([] as TableData[]);
   const [isOpen, toggleDrawer] = useState(false);
   const [drawerData, setDrawerData] = useState({} as ApacheSpark);
   const classes = useDrawerStyles();
-  // const { ns, clusterName, labelSelector } = getAnnotationValues(entity);
+  const { entity } = useEntity();
+  const { ns, clusterName, labelSelector } = getAnnotationValues(entity);
 
   const { value, loading, error } = useAsync(
     async (): Promise<ApacheSparkList> => {
-      return await apiClient.getSparkApps(
-        'cnoe-packaging-2',
-        'default',
-        undefined,
-      );
+      return await apiClient.getSparkApps(clusterName, ns, labelSelector);
     },
   );
@@ -124,6 +120,8 @@ export const ApacheSparkOverviewTable = () => {
           paging: true,
           search: true,
           sorting: true,
+          pageSize: 10,
+          pageSizeOptions: [5, 10, 20, 50],
         }}
         onRowClick={(_event, rowData: TableData | undefined) => {
           setDrawerData(rowData?.raw!);
diff --git a/plugins/apache-spark/src/components/DetailedDrawer/DetailedDrawer.tsx b/plugins/apache-spark/src/components/DetailedDrawer/DetailedDrawer.tsx
index 386892b..fbaaddf 100644
--- a/plugins/apache-spark/src/components/DetailedDrawer/DetailedDrawer.tsx
+++ b/plugins/apache-spark/src/components/DetailedDrawer/DetailedDrawer.tsx
@@ -1,6 +1,5 @@
 import { ApacheSpark } from '../../api/model';
 import {
-  Button,
   createStyles,
   IconButton,
   makeStyles,
@@ -87,7 +86,7 @@ export const DrawerContent = ({
-      <Button>
+      <>
diff --git a/plugins/apache-spark/src/components/DetailedDrawer/DrawerOverview.tsx b/plugins/apache-spark/src/components/DetailedDrawer/DrawerOverview.tsx index 471a86a..1a4d373 100644 --- a/plugins/apache-spark/src/components/DetailedDrawer/DrawerOverview.tsx +++ b/plugins/apache-spark/src/components/DetailedDrawer/DrawerOverview.tsx @@ -1,4 +1,4 @@ -import { createStyles, makeStyles, Theme } from '@material-ui/core'; +import { createStyles, makeStyles } from '@material-ui/core'; import { ApacheSpark } from '../../api/model'; import { InfoCard, @@ -47,7 +47,7 @@ function generateMetadata(sparkApp: ApacheSpark): generateMetadataOutput { } } out.app = app; - out.driver = sparkApp.status.driverInfo; + out.driver = sparkApp.status.driverInfo ? sparkApp.status.driverInfo : {}; out.executor = executor; return out; } diff --git a/plugins/apache-spark/src/components/Overvew/Overview.tsx b/plugins/apache-spark/src/components/Overview/Overview.tsx similarity index 100% rename from plugins/apache-spark/src/components/Overvew/Overview.tsx rename to plugins/apache-spark/src/components/Overview/Overview.tsx diff --git a/plugins/apache-spark/src/components/Overvew/index.ts b/plugins/apache-spark/src/components/Overview/index.ts similarity index 100% rename from plugins/apache-spark/src/components/Overvew/index.ts rename to plugins/apache-spark/src/components/Overview/index.ts diff --git a/plugins/apache-spark/src/components/utils.ts b/plugins/apache-spark/src/components/utils.ts index 4c4a5f1..95817d2 100644 --- a/plugins/apache-spark/src/components/utils.ts +++ b/plugins/apache-spark/src/components/utils.ts @@ -4,7 +4,7 @@ import { CLUSTER_NAME_ANNOTATION, K8S_LABEL_SELECTOR_ANNOTATION, K8S_NAMESPACE_ANNOTATION, -} from '../plugin'; +} from '../consts'; export type getAnnotationValuesOutput = { ns: string; diff --git a/plugins/apache-spark/src/consts.ts b/plugins/apache-spark/src/consts.ts new file mode 100644 index 0000000..2a2659f --- /dev/null +++ b/plugins/apache-spark/src/consts.ts @@ -0,0 +1,6 @@ +export const APACHE_SPARK_LABEL_SELECTOR_ANNOTATION = + 'apache-spark/label-selector'; +export const CLUSTER_NAME_ANNOTATION = 'apache-spark/cluster-name'; +export const K8S_LABEL_SELECTOR_ANNOTATION = + 'backstage.io/kubernetes-label-selector'; +export const K8S_NAMESPACE_ANNOTATION = 'backstage.io/kubernetes-namespace'; diff --git a/plugins/apache-spark/src/plugin.ts b/plugins/apache-spark/src/plugin.ts index 7f6c0ac..0578409 100644 --- a/plugins/apache-spark/src/plugin.ts +++ b/plugins/apache-spark/src/plugin.ts @@ -8,12 +8,6 @@ import { rootRouteRef } from './routes'; import { apacheSparkApiRef, ApacheSparkClient } from './api'; import { kubernetesApiRef } from '@backstage/plugin-kubernetes'; -export const APACHE_SPARK_LABEL_SELECTOR_ANNOTATION = - 'apache-spark/label-selector'; -export const CLUSTER_NAME_ANNOTATION = 'apache-spark/cluster-name'; -export const K8S_LABEL_SELECTOR_ANNOTATION = - 'backstage.io/kubernetes-label-selector'; -export const K8S_NAMESPACE_ANNOTATION = 'backstage.io/kubernetes-namespace'; export const apacheSparkPlugin = createPlugin({ id: 'apache-spark', routes: { @@ -34,7 +28,7 @@ export const ApacheSparkPage = apacheSparkPlugin.provide( createRoutableExtension({ name: 'ApacheSparkPage', component: () => - import('./components/Overvew').then(m => m.ApacheSparkOverviewPage), + import('./components/Overview').then(m => m.ApacheSparkOverviewPage), mountPoint: rootRouteRef, }), ); diff --git a/plugins/apache-spark/test/t b/plugins/apache-spark/test/t deleted file mode 100644 
index 2d05bfb..0000000
--- a/plugins/apache-spark/test/t
+++ /dev/null
@@ -1,276 +0,0 @@
-[276 lines removed: a saved Apache Spark web UI "PythonPi - Spark Jobs" HTML page (the completed-jobs view for pi.py: one completed job, 2/2 tasks, FIFO scheduling); the raw markup is not reproduced here]
\ No newline at end of file
diff --git a/plugins/apache-spark/test/t.tgz b/plugins/apache-spark/test/t.tgz
deleted file mode 100644
index d60cc24390cf293fcfafa417798f43a6f9a51604..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 3005
[base85-encoded binary data omitted]
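Several hunks above replace the hard-coded `'cnoe-packaging-2'` cluster and `'default'` namespace with values resolved per entity via `getAnnotationValues` from `src/components/utils.ts`. The patch moves that helper's constants into the new `src/consts.ts` but never shows the helper's body, so the sketch below is a hypothetical reconstruction from what the diff does show: the exported annotation constants, the `{ ns, clusterName, labelSelector }` shape the components destructure, and the `'default'`-namespace fallback visible in `src/api/index.ts`. The precedence between the two label-selector annotations is an assumption.

```typescript
// Hypothetical sketch of src/components/utils.ts (not shown in this patch).
import { Entity } from '@backstage/catalog-model';
import {
  APACHE_SPARK_LABEL_SELECTOR_ANNOTATION,
  CLUSTER_NAME_ANNOTATION,
  K8S_LABEL_SELECTOR_ANNOTATION,
  K8S_NAMESPACE_ANNOTATION,
} from '../consts';

export type getAnnotationValuesOutput = {
  ns: string;
  clusterName?: string;
  labelSelector?: string;
};

export function getAnnotationValues(
  entity: Entity,
): getAnnotationValuesOutput {
  const annotations = entity.metadata.annotations ?? {};
  // Namespace falls back to 'default', mirroring the fallback in src/api/index.ts.
  const ns = annotations[K8S_NAMESPACE_ANNOTATION] ?? 'default';
  // Cluster name is optional; the API client falls back to the first
  // configured cluster (getFirstCluster) when it is undefined.
  const clusterName = annotations[CLUSTER_NAME_ANNOTATION];
  // Assumed precedence: the Spark-specific selector wins over the generic
  // Kubernetes one when both annotations are present.
  const labelSelector =
    annotations[APACHE_SPARK_LABEL_SELECTOR_ANNOTATION] ??
    annotations[K8S_LABEL_SELECTOR_ANNOTATION];
  return { ns, clusterName, labelSelector };
}
```

With annotations resolved this way, the `getSparkApps(clusterName, ns, labelSelector)` call and the new `labelSelector` query parameter in `src/api/index.ts` pick up whatever `apache-spark/cluster-name`, `apache-spark/label-selector`, or `backstage.io/kubernetes-*` annotations a catalog entity declares, which is what lets this patch delete the hard-coded test values.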