diff --git a/app-config.yaml b/app-config.yaml
index 08d2abf..e338155 100644
--- a/app-config.yaml
+++ b/app-config.yaml
@@ -1,9 +1,9 @@
 app:
-  title: Scaffolded Backstage App
+  title: CNOE
   baseUrl: http://localhost:3000

 organization:
-  name: My Company
+  name: CNOE

 backend:
   # Used for enabling authentication, secret is shared by all backend plugins
@@ -33,15 +33,15 @@ backend:
   #   workingDirectory: /tmp # Use this to configure a working directory for the scaffolder, defaults to the OS temp-dir

 integrations:
-  github:
-    - host: github.com
-      # This is a Personal Access Token or PAT from GitHub. You can find out how to generate this token, and more information
-      # about setting up the GitHub integration here: https://backstage.io/docs/getting-started/configuration#setting-up-a-github-integration
-      token: ${GITHUB_TOKEN}
-    ### Example for how to add your GitHub Enterprise instance using the API:
-    # - host: ghe.example.net
-    #   apiBaseUrl: https://ghe.example.net/api/v3
-    #   token: ${GHE_TOKEN}
+  gitea:
+    - baseUrl: https://gitea.cnoe.localtest.me:8443
+      host: gitea.cnoe.localtest.me:8443
+      username: giteaAdmin
+      password: giteaPassword
+    - baseUrl: https://gitea.cnoe.localtest.me
+      host: gitea.cnoe.localtest.me:8443
+      username: giteaAdmin
+      password: giteaPassword

 proxy:
   ### Example for how to add a proxy endpoint for the frontend.
@@ -64,33 +64,45 @@ techdocs:

 auth:
   # see https://backstage.io/docs/auth/ to learn about auth providers
-  providers: {}
+  environment: local # set this to development to enable SSO
+  session:
+    secret: abcdfkjalskdfjkla
+  providers:
+    keycloak-oidc:
+      development:
+        metadataUrl: https://keycloak.cnoe.localtest.me:8443/realms/cnoe/.well-known/openid-configuration
+        clientId: backstage
+        clientSecret: ${KEYCLOAK_CLIENT_SECRET}
+        scope: 'openid profile email groups'
+        prompt: auto

 scaffolder:
   # see https://backstage.io/docs/features/software-templates/configuration for software template options
-
+  defaultAuthor:
+    name: backstage-scaffolder
+    email: noreply
+  defaultCommitMessage: "backstage scaffolder"

 catalog:
   import:
     entityFilename: catalog-info.yaml
     pullRequestBranchName: backstage-integration
   rules:
-    - allow: [Component, System, API, Resource, Location]
+    - allow: [ Component, System, API, Resource, Location, Template ]
   locations:
-    # Local example data, file locations are relative to the backend process, typically `packages/backend`
-    - type: file
-      target: ../../examples/entities.yaml
+    - type: url
+      target: https://gitea.cnoe.localtest.me:8443/giteaAdmin/entities/src/branch/main/catalog-info.yaml

-    # Local example template
-    - type: file
-      target: ../../examples/template/template.yaml
-      rules:
-        - allow: [Template]
-
-    # Local example organizational data
-    - type: file
-      target: ../../examples/org.yaml
-      rules:
-        - allow: [User, Group]
+#    # Local example template
+#    - type: file
+#      target: ../../examples/template/template.yaml
+#      rules:
+#        - allow: [Template]
+#
+#    # Local example organizational data
+#    - type: file
+#      target: ../../examples/org.yaml
+#      rules:
+#        - allow: [User, Group]

     ## Uncomment these lines to add more example data
     # - type: url
@@ -101,3 +113,26 @@ catalog:
     #   target: https://github.com/backstage/backstage/blob/master/packages/catalog-model/examples/acme-corp.yaml
     #   rules:
     #     - allow: [User, Group]
+kubernetes:
+  serviceLocatorMethod:
+    type: 'multiTenant'
+  clusterLocatorMethods:
+    - type: 'config'
+      clusters:
+        - url: https://127.0.0.1:33277 # you may need to change this
+          name: local
+          authProvider: 'serviceAccount'
+          skipTLSVerify: true
+          # replace with your own service account token value. e.g. kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token
+          serviceAccountToken: eyJhbG......
+argocd:
+  appLocatorMethods:
+    - type: 'config'
+      instances:
+        - name: in-cluster
+          url: https://argocd.cnoe.localtest.me:8443
+          username: admin
+          # replace with your argocd password e.g. kubectl -n argocd get secret argocd-initial-admin-secret -o jsonpath="{.data.password}" | base64 -d
+          password: ${ARGOCD_ADMIN_PASSWORD}
+argoWorkflows:
+  baseUrl: https://argo.cnoe.localtest.me:8443
diff --git a/packages/app/package.json b/packages/app/package.json
index 1169820..37f5c9b 100644
--- a/packages/app/package.json
+++ b/packages/app/package.json
@@ -14,36 +14,39 @@
     "lint": "backstage-cli package lint"
   },
   "dependencies": {
-    "@backstage/app-defaults": "^1.4.7",
-    "@backstage/catalog-model": "^1.4.3",
-    "@backstage/cli": "^0.25.1",
-    "@backstage/core-app-api": "^1.11.3",
-    "@backstage/core-components": "^0.13.10",
-    "@backstage/core-plugin-api": "^1.8.2",
-    "@backstage/integration-react": "^1.1.23",
-    "@backstage/plugin-api-docs": "^0.10.3",
-    "@backstage/plugin-catalog": "^1.16.1",
-    "@backstage/plugin-catalog-common": "^1.0.20",
-    "@backstage/plugin-catalog-graph": "^0.3.3",
-    "@backstage/plugin-catalog-import": "^0.10.5",
-    "@backstage/plugin-catalog-react": "^1.9.3",
-    "@backstage/plugin-github-actions": "^0.6.10",
-    "@backstage/plugin-home": "^0.6.1",
-    "@backstage/plugin-kubernetes": "^0.11.4",
-    "@backstage/plugin-org": "^0.6.19",
-    "@backstage/plugin-permission-react": "^0.4.19",
-    "@backstage/plugin-scaffolder": "^1.17.1",
-    "@backstage/plugin-search": "^1.4.5",
-    "@backstage/plugin-search-react": "^1.7.5",
-    "@backstage/plugin-tech-radar": "^0.6.12",
-    "@backstage/plugin-techdocs": "^1.9.3",
-    "@backstage/plugin-techdocs-module-addons-contrib": "^1.1.4",
-    "@backstage/plugin-techdocs-react": "^1.1.15",
-    "@backstage/plugin-user-settings": "^0.8.0",
-    "@backstage/theme": "^0.5.0",
+    "@backstage/app-defaults": "~1.4.7",
+    "@backstage/catalog-model": "~1.4.3",
+    "@backstage/cli": "~0.25.1",
+    "@backstage/core-app-api": "~1.11.3",
+    "@backstage/core-components": "~0.13.10",
+    "@backstage/core-plugin-api": "~1.8.2",
+    "@backstage/integration-react": "~1.1.23",
+    "@backstage/plugin-api-docs": "~0.10.3",
+    "@backstage/plugin-catalog": "~1.16.1",
+    "@backstage/plugin-catalog-common": "~1.0.20",
+    "@backstage/plugin-catalog-graph": "~0.3.3",
+    "@backstage/plugin-catalog-import": "~0.10.5",
+    "@backstage/plugin-catalog-react": "~1.9.3",
+    "@backstage/plugin-github-actions": "~0.6.10",
+    "@backstage/plugin-home": "~0.6.1",
+    "@backstage/plugin-kubernetes": "~0.11.4",
+    "@backstage/plugin-org": "~0.6.19",
+    "@backstage/plugin-permission-react": "~0.4.19",
+    "@backstage/plugin-scaffolder": "~1.17.1",
+    "@backstage/plugin-search": "~1.4.5",
+    "@backstage/plugin-search-react": "~1.7.5",
+    "@backstage/plugin-tech-radar": "~0.6.12",
+    "@backstage/plugin-techdocs": "~1.9.3",
+    "@backstage/plugin-techdocs-module-addons-contrib": "~1.1.4",
+    "@backstage/plugin-techdocs-react": "~1.1.15",
+    "@backstage/plugin-user-settings": "~0.8.0",
+    "@backstage/theme": "~0.5.0",
+    "@internal/plugin-apache-spark": "^0.1.0",
+    "@internal/plugin-argo-workflows": "^0.1.0",
     "@internal/plugin-cnoe-ui": "^0.1.0",
     "@material-ui/core": "^4.12.2",
     "@material-ui/icons": "^4.9.1",
+    "@roadiehq/backstage-plugin-argo-cd": "^2.5.1",
     "history": "^5.0.0",
     "react": "^18.0.2",
     "react-dom": "^18.0.2",
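Note on the `argocd` block added to app-config.yaml above: both the Roadie Argo CD plugins and the custom scaffolder action introduced later in this patch read `argocd.appLocatorMethods` through Backstage's config API. A minimal sketch of that lookup, using an inline `ConfigReader` as an illustrative stand-in for the real app-config.yaml:

```ts
import { ConfigReader } from '@backstage/config';

// Illustrative stand-in for the app-config.yaml shown above.
const config = new ConfigReader({
  argocd: {
    appLocatorMethods: [
      {
        type: 'config',
        instances: [
          { name: 'in-cluster', url: 'https://argocd.cnoe.localtest.me:8443' },
        ],
      },
    ],
  },
});

// Flatten every `instances` entry of every `type: config` locator,
// mirroring the lookup the cnoe:create-argocd-app action performs below.
const instances = config
  .getConfigArray('argocd.appLocatorMethods')
  .filter(m => m.getString('type') === 'config')
  .flatMap(m => m.getConfigArray('instances'))
  .map(i => ({ name: i.getString('name'), url: i.getString('url') }));

console.log(instances); // [{ name: 'in-cluster', url: 'https://...' }]
```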
diff --git a/packages/app/src/App.tsx b/packages/app/src/App.tsx
index b8c215b..87faf6d 100644
--- a/packages/app/src/App.tsx
+++ b/packages/app/src/App.tsx
@@ -42,6 +42,8 @@ import {
   cnoeDarkTheme,
 } from '@internal/plugin-cnoe-ui';
 import {configApiRef, useApi} from "@backstage/core-plugin-api";
+import { ArgoWorkflowsPage } from '@internal/plugin-argo-workflows';
+import { ApacheSparkPage } from '@internal/plugin-apache-spark';

 const app = createApp({
   apis,
@@ -147,6 +149,8 @@ const routes = (
     <Route path="/settings" element={<UserSettingsPage />} />
     <Route path="/catalog-graph" element={<CatalogGraphPage />} />
+    <Route path="/argo-workflows" element={<ArgoWorkflowsPage />} />
+    <Route path="/apache-spark" element={<ApacheSparkPage />} />
   </FlatRoutes>
 );
diff --git a/packages/app/src/components/catalog/EntityPage.tsx b/packages/app/src/components/catalog/EntityPage.tsx
index 7c6a71e..30e5db9 100644
--- a/packages/app/src/components/catalog/EntityPage.tsx
+++ b/packages/app/src/components/catalog/EntityPage.tsx
@@ -58,6 +58,19 @@ import {
 import { TechDocsAddons } from '@backstage/plugin-techdocs-react';
 import { ReportIssue } from '@backstage/plugin-techdocs-module-addons-contrib';

+import { EntityKubernetesContent } from '@backstage/plugin-kubernetes';
+
+import {
+  EntityArgoCDOverviewCard,
+  isArgocdAvailable
+} from '@roadiehq/backstage-plugin-argo-cd';
+
+import {
+  EntityArgoWorkflowsOverviewCard, EntityArgoWorkflowsTemplateOverviewCard,
+  isArgoWorkflowsAvailable,
+} from '@internal/plugin-argo-workflows';
+import {ApacheSparkPage, isApacheSparkAvailable} from "@internal/plugin-apache-spark";
+
 const techdocsContent = (
   <EntityTechdocsContent>
     <TechDocsAddons>
@@ -127,6 +140,23 @@ const overviewContent = (
     <Grid item md={6}>
       <EntityAboutCard variant="gridItem" />
     </Grid>
+    <EntitySwitch>
+      <EntitySwitch.Case if={e => Boolean(isArgocdAvailable(e))}>
+        <Grid item md={6}>
+          <EntityArgoCDOverviewCard />
+        </Grid>
+      </EntitySwitch.Case>
+    </EntitySwitch>
+    <EntitySwitch>
+      <EntitySwitch.Case if={e => isArgoWorkflowsAvailable(e)}>
+        <Grid item md={6}>
+          <EntityArgoWorkflowsOverviewCard />
+        </Grid>
+        <Grid item md={6}>
+          <EntityArgoWorkflowsTemplateOverviewCard />
+        </Grid>
+      </EntitySwitch.Case>
+    </EntitySwitch>
     <Grid item md={6} xs={12}>
       <EntityCatalogGraphCard variant="gridItem" height={400} />
     </Grid>
@@ -137,6 +167,7 @@ const overviewContent = (
     <Grid item md={8} xs={12}>
       <EntityHasSubcomponentsCard variant="gridItem" />
     </Grid>
+
   </Grid>
 );

@@ -150,6 +181,14 @@ const serviceEntityPage = (
     <EntityLayout.Route path="/ci-cd" title="CI/CD">
       {cicdContent}
     </EntityLayout.Route>

+    <EntityLayout.Route path="/kubernetes" title="Kubernetes">
+      <EntityKubernetesContent />
+    </EntityLayout.Route>
+
+    <EntityLayout.Route path="/apache-spark" title="Apache Spark" if={isApacheSparkAvailable}>
+      <ApacheSparkPage />
+    </EntityLayout.Route>
+
     <EntityLayout.Route path="/api" title="API">
       <Grid container spacing={3} alignItems="stretch">
diff --git a/packages/backend/package.json b/packages/backend/package.json
index 2d8fff6..793c00c 100644
--- a/packages/backend/package.json
+++ b/packages/backend/package.json
@@ -41,6 +41,8 @@
     "@backstage/plugin-search-backend-node": "~1.2.13",
     "@backstage/plugin-techdocs-backend": "~1.9.2",
     "@backstage/types": "~1.1.1",
+    "@roadiehq/backstage-plugin-argo-cd-backend": "~2.14.0",
+    "@roadiehq/scaffolder-backend-module-utils": "~1.13.1",
     "app": "link:../app",
     "better-sqlite3": "^9.0.0",
     "dockerode": "^3.3.1",
diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts
index c4e585f..d81d859 100644
--- a/packages/backend/src/index.ts
+++ b/packages/backend/src/index.ts
@@ -33,7 +33,7 @@ import { ServerPermissionClient } from '@backstage/plugin-permission-node';
 import { DefaultIdentityClient } from '@backstage/plugin-auth-node';

 import kubernetes from './plugins/kubernetes';
-
+import argocd from './plugins/argocd';
 function makeCreateEnv(config: Config) {
   const root = getRootLogger();
   const reader = UrlReaders.default({ logger: root, config });
@@ -89,6 +89,7 @@ async function main() {
   const appEnv = useHotMemoize(module, () => createEnv('app'));
   const kubernetesEnv = useHotMemoize(module, () => createEnv('kubernetes'));
+  const argocdEnv = useHotMemoize(module, () => createEnv('argocd'));

   const apiRouter = Router();
   apiRouter.use('/catalog', await catalog(catalogEnv));
@@ -99,6 +100,7 @@ async function main() {
   apiRouter.use('/search', await search(searchEnv));
   apiRouter.use('/kubernetes', await kubernetes(kubernetesEnv));
+  apiRouter.use('/argocd', await argocd(argocdEnv));

   // Add backends ABOVE this line; this 404 handler is the catch-all fallback
   apiRouter.use(notFoundHandler());
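The `if={...}` guards in the EntityPage changes above follow Backstage's standard availability pattern: a predicate that checks for a well-known annotation on the entity. A sketch of that convention (the annotation key here is illustrative; each plugin exports its own constant):

```ts
import { Entity } from '@backstage/catalog-model';

// Illustrative annotation key; the real plugins export their own constants.
const LABEL_SELECTOR_ANNOTATION = 'apache-spark/label-selector';

// Returns true when the entity opts in to the plugin via its annotation,
// which is what EntitySwitch.Case and EntityLayout.Route evaluate above.
export const isAvailable = (entity: Entity): boolean =>
  Boolean(entity.metadata.annotations?.[LABEL_SELECTOR_ANNOTATION]);
```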
diff --git a/packages/backend/src/plugins/argocd.ts b/packages/backend/src/plugins/argocd.ts
new file mode 100644
index 0000000..53fb00d
--- /dev/null
+++ b/packages/backend/src/plugins/argocd.ts
@@ -0,0 +1,136 @@
+import {Config} from "@backstage/config";
+import {createTemplateAction} from "@backstage/plugin-scaffolder-node";
+import {examples} from "./gitea-actions";
+import {Logger} from "winston";
+
+import { ArgoService } from '@roadiehq/backstage-plugin-argo-cd-backend';
+
+import { createRouter } from '@roadiehq/backstage-plugin-argo-cd-backend';
+import { PluginEnvironment } from '../types';
+
+export default async function createPlugin({
+  logger,
+  config,
+}: PluginEnvironment) {
+  return await createRouter({ logger, config });
+}
+
+export function createArgoCDApp(options: {
+  config: Config;
+  logger: Logger;
+}) {
+  const { config, logger } = options;
+
+  return createTemplateAction<{
+    repoUrl: string;
+    projectName?: string;
+    appName: string;
+    argoInstance: string;
+    path: string;
+    labelValue?: string;
+    appNamespace: string;
+  }>({
+    id: 'cnoe:create-argocd-app',
+    description:
+      'creates argocd app',
+    examples,
+    schema: {
+      input: {
+        type: 'object',
+        required: ['repoUrl', 'appName', 'argoInstance', 'path', 'appNamespace'],
+        properties: {
+          repoUrl: {
+            title: 'Repository Location',
+            type: 'string',
+          },
+          projectName: {
+            title: 'name of the project in argocd',
+            type: 'string',
+          },
+          appName: {
+            title: 'application name in argocd',
+            type: 'string',
+          },
+          appNamespace: {
+            title: 'namespace of the application in argocd',
+            type: 'string',
+          },
+          argoInstance: {
+            title: 'backstage argocd instance name defined in app-config.yaml',
+            type: 'string',
+          },
+          path: {
+            title: 'argocd spec path',
+            type: 'string',
+          },
+          labelValue: {
+            title: 'for argocd plugin to locate this app',
+            type: 'string',
+          },
+        },
+      },
+      output: {
+      },
+    },
+    async handler(ctx) {
+      const {
+        repoUrl,
+        projectName,
+        appName,
+        argoInstance,
+        path,
+        labelValue,
+        appNamespace,
+      } = ctx.input;
+
+      const argoUserName =
+        config.getOptionalString('argocd.username') ?? 'argocdUsername';
+      const argoPassword =
+        config.getOptionalString('argocd.password') ?? 'argocdPassword';
+
+      const argoSvc = new ArgoService(
+        argoUserName,
+        argoPassword,
+        config,
+        logger,
+      );
+
+      const argocdConfig = config
+        .getConfigArray('argocd.appLocatorMethods')
+        .filter(element => element.getString('type') === 'config')
+        .reduce(
+          (acc: Config[], argoApp: Config) =>
+            acc.concat(argoApp.getConfigArray('instances')),
+          [],
+        )
+        .map(instance => ({
+          name: instance.getString('name'),
+          url: instance.getString('url'),
+          token: instance.getOptionalString('token'),
+          username: instance.getOptionalString('username'),
+          password: instance.getOptionalString('password'),
+        }));
+      const matchedArgoInstance = argocdConfig.find(
+        argoHost => argoHost.name === argoInstance,
+      );
+      if (!matchedArgoInstance) {
+        throw new Error(`Unable to find Argo instance named "${argoInstance}"`);
+      }
+      const token =
+        matchedArgoInstance.token ||
+        (await argoSvc.getArgoToken(matchedArgoInstance));
+
+      await argoSvc.createArgoApplication({
+        baseUrl: matchedArgoInstance.url,
+        argoToken: token,
+        appName: appName,
+        projectName: projectName ? projectName : appName,
+        namespace: appNamespace,
+        sourceRepo: repoUrl,
+        sourcePath: path,
+        labelValue: labelValue ? labelValue : appName,
+      });
+    },
+  });
+}
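A hedged sketch of exercising this action outside the scaffolder, wiring it up with an in-memory config and a winston logger. The `ctx` object is hand-rolled and far smaller than the real `ActionContext`, and running it would attempt real Argo CD calls; all values are illustrative:

```ts
import { ConfigReader } from '@backstage/config';
import { createLogger } from 'winston';
import { createArgoCDApp } from './argocd';

const action = createArgoCDApp({
  config: new ConfigReader({
    argocd: {
      username: 'admin',
      password: 'example-password', // illustrative only
      appLocatorMethods: [
        {
          type: 'config',
          instances: [{ name: 'in-cluster', url: 'https://argocd.example.com' }],
        },
      ],
    },
  }),
  logger: createLogger(),
});

// Hand-rolled stand-in for the scaffolder's ActionContext; `as any`
// papers over the fields (logger, workspacePath, ...) a real run supplies.
await action.handler({
  input: {
    repoUrl: 'https://gitea.example.com/org/repo',
    appName: 'my-app',
    argoInstance: 'in-cluster',
    path: 'manifests/',
    appNamespace: 'default',
  },
} as any);
```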
diff --git a/packages/backend/src/plugins/gitea-actions.ts b/packages/backend/src/plugins/gitea-actions.ts
index 8dd9ebc..81d7cbb 100644
--- a/packages/backend/src/plugins/gitea-actions.ts
+++ b/packages/backend/src/plugins/gitea-actions.ts
@@ -1,3 +1,4 @@
+// this is necessary until https://github.com/backstage/backstage/pull/21890/ is merged and released.
 import { InputError } from '@backstage/errors';
 import { Config } from '@backstage/config';
 import {
@@ -6,7 +7,6 @@ import {
   ScmIntegrationRegistry,
   ScmIntegrations,
 } from '@backstage/integration';
 import {
-  ActionContext,
   createTemplateAction,
   getRepoSourceDirectory,
   initRepoAndPush,
@@ -232,32 +232,32 @@ function checkRequiredParams(repoUrl: URL, ...params: string[]) {
     }
   }
 }
-const checkGiteaContentUrl = async (
-  config: GiteaIntegrationConfig,
-  options: {
-    owner?: string;
-    repo: string;
-    defaultBranch?: string;
-  },
-): Promise<Response> => {
-  const { owner, repo, defaultBranch } = options;
-  let response: Response;
-  const getOptions: RequestInit = {
-    method: 'GET',
-  };
-
-  try {
-    response = await fetch(
-      `${config.baseUrl}/${owner}/${repo}/src/branch/${defaultBranch}`,
-      getOptions,
-    );
-  } catch (e) {
-    throw new Error(
-      `Unable to get the repository: ${owner}/${repo} metadata , ${e}`,
-    );
-  }
-  return response;
-};
+// const checkGiteaContentUrl = async (
+//   config: GiteaIntegrationConfig,
+//   options: {
+//     owner?: string;
+//     repo: string;
+//     defaultBranch?: string;
+//   },
+// ): Promise<Response> => {
+//   const { owner, repo, defaultBranch } = options;
+//   let response: Response;
+//   const getOptions: RequestInit = {
+//     method: 'GET',
+//   };
+//
+//   try {
+//     response = await fetch(
+//       `${config.baseUrl}/${owner}/${repo}/src/branch/${defaultBranch}`,
+//       getOptions,
+//     );
+//   } catch (e) {
+//     throw new Error(
+//       `Unable to get the repository: ${owner}/${repo} metadata , ${e}`,
+//     );
+//   }
+//   return response;
+// };

 const checkGiteaOrg = async (
   config: GiteaIntegrationConfig,
@@ -372,63 +372,58 @@ const generateCommitMessage = (
   return msg;
 };

-/**
- * Checks if the provided function can be executed within a specific period of time limit.
- * @param fn
- * @param timeLimit
- */
-async function checkDurationLimit(fn: () => void, timeLimit: number): Promise<boolean> {
-
-  const startTime = process.hrtime();
-
-  // Call the function
-  await fn();
-
-  const endTime = process.hrtime(startTime);
-  const durationInMs = endTime[0] * 1000 + endTime[1] / 1e6;
-
-  // Check if the duration exceeds the time limit
-  return durationInMs <= timeLimit;
-}
-
-async function checkAvailabilityGiteaRepository(
-  integrationConfig: GiteaIntegrationConfig,
-  options: {
-    owner?: string;
-    repo: string;
-    defaultBranch: string;
-    ctx: ActionContext<any>;
-  },
-) {
-  const { owner, repo, defaultBranch, ctx } = options;
-  const sleep = (ms: number | undefined) => new Promise(r => setTimeout(r, ms));
-  let response: Response;
-
-  const p = new Promise<void>((resolve, reject) => {
-    setTimeout(async () => {
-      response = await checkGiteaContentUrl(integrationConfig, {
-        owner,
-        repo,
-        defaultBranch,
-      });
-
-      while (response.status !== 200) {
-        if (ctx.signal?.aborted) return;
-        await sleep(1000);
-        response = await checkGiteaContentUrl(integrationConfig, {
-          owner,
-          repo,
-          defaultBranch,
-        });
-      }
-      resolve()
-    },
-    5000
-    )
-  })
-  return p
-
-}
+// async function checkDurationLimit(fn: () => void, timeLimit: number): Promise<boolean> {
+//
+//   const startTime = process.hrtime();
+//
+//   // Call the function
+//   await fn();
+//
+//   const endTime = process.hrtime(startTime);
+//   const durationInMs = endTime[0] * 1000 + endTime[1] / 1e6;
+//
+//   // Check if the duration exceeds the time limit
+//   return durationInMs <= timeLimit;
+// }
+//
+// async function checkAvailabilityGiteaRepository(
+//   integrationConfig: GiteaIntegrationConfig,
+//   options: {
+//     owner?: string;
+//     repo: string;
+//     defaultBranch: string;
+//     ctx: ActionContext<any>;
+//   },
+// ) {
+//   const { owner, repo, defaultBranch, ctx } = options;
+//   const sleep = (ms: number | undefined) => new Promise(r => setTimeout(r, ms));
+//   let response: Response;
+//
+//   const p = new Promise<void>((resolve, reject) => {
+//     setTimeout(async () => {
+//       response = await checkGiteaContentUrl(integrationConfig, {
+//         owner,
+//         repo,
+//         defaultBranch,
+//       });
+//
+//       while (response.status !== 200) {
+//         if (ctx.signal?.aborted) return;
+//         await sleep(1000);
+//         response = await checkGiteaContentUrl(integrationConfig, {
+//           owner,
+//           repo,
+//           defaultBranch,
+//         });
+//       }
+//       resolve()
+//     },
+//     5000
+//     )
+//   })
+//   return p
+//
+// }

 /**
  * Creates a new action that initializes a git repository using the content of the workspace.
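For reference, the disabled helpers above polled the repository's content URL until it returned 200. A minimal, self-contained sketch of that polling idea (URL, timings, and names illustrative), should it ever need to be revived:

```ts
// Minimal sketch of the disabled polling logic above; not part of the patch.
// Repeatedly GETs a URL until it answers 200, the signal aborts, or a
// deadline passes. Runnable on Node 18+ (global fetch).
async function waitForUrl(
  url: string,
  opts: { intervalMs?: number; timeoutMs?: number; signal?: AbortSignal } = {},
): Promise<void> {
  const { intervalMs = 1000, timeoutMs = 60_000, signal } = opts;
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    if (signal?.aborted) throw new Error('aborted');
    const resp = await fetch(url, { method: 'GET' });
    if (resp.status === 200) return;
    await new Promise(r => setTimeout(r, intervalMs));
  }
  throw new Error(`timed out waiting for ${url}`);
}
```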
diff --git a/packages/backend/src/plugins/scaffolder.ts b/packages/backend/src/plugins/scaffolder.ts
index ff49675..a577370 100644
--- a/packages/backend/src/plugins/scaffolder.ts
+++ b/packages/backend/src/plugins/scaffolder.ts
@@ -4,6 +4,23 @@ import { Router } from 'express';
 import type { PluginEnvironment } from '../types';
 import { ScmIntegrations } from '@backstage/integration';
 import {createPublishGiteaAction} from "./gitea-actions";
+import {createArgoCDApp} from "./argocd";
+
+import {
+  createZipAction,
+  createSleepAction,
+  createWriteFileAction,
+  createAppendFileAction,
+  createMergeJSONAction,
+  createMergeAction,
+  createParseFileAction,
+  createSerializeYamlAction,
+  createSerializeJsonAction,
+  createJSONataAction,
+  createYamlJSONataTransformAction,
+  createJsonJSONataTransformAction,
+  createReplaceInFileAction
+} from '@roadiehq/scaffolder-backend-module-utils';

 export default async function createPlugin(
   env: PluginEnvironment,
@@ -24,12 +41,36 @@ export default async function createPlugin(
     integrations: integrations,
     config: env.config,
   }
+  const argocdOptions = {
+    config: env.config,
+    logger: env.logger
+  }

-  const cnoeActions = [createPublishGiteaAction(options)]
+  const cnoeActions = [
+    createPublishGiteaAction(options),
+    createArgoCDApp(argocdOptions)
+  ]
+
+  const roadieUtilActions = [
+    createZipAction(),
+    createSleepAction(),
+    createWriteFileAction(),
+    createAppendFileAction(),
+    createMergeJSONAction({}),
+    createMergeAction(),
+    createParseFileAction(),
+    createSerializeYamlAction(),
+    createSerializeJsonAction(),
+    createJSONataAction(),
+    createYamlJSONataTransformAction(),
+    createJsonJSONataTransformAction(),
+    createReplaceInFileAction()
+  ]

   const actions = [
     ...builtInActions,
     ...cnoeActions,
+    ...roadieUtilActions
   ];

   return await createRouter({
diff --git a/plugins/apache-spark/.eslintrc.js b/plugins/apache-spark/.eslintrc.js
new file mode 100644
index 0000000..e2a53a6
--- /dev/null
+++ b/plugins/apache-spark/.eslintrc.js
@@ -0,0 +1 @@
+module.exports = require('@backstage/cli/config/eslint-factory')(__dirname);
diff --git a/plugins/apache-spark/README.md b/plugins/apache-spark/README.md
new file mode 100644
index 0000000..976aba2
--- /dev/null
+++ b/plugins/apache-spark/README.md
@@ -0,0 +1,13 @@
+# apache-spark
+
+Welcome to the apache-spark plugin!
+
+_This plugin was created through the Backstage CLI_
+
+## Getting started
+
+Your plugin has been added to the example app in this repository, meaning you'll be able to access it by running `yarn start` in the root directory, and then navigating to [/apache-spark](http://localhost:3000/apache-spark).
+
+You can also serve the plugin in isolation by running `yarn start` in the plugin directory.
+This method of serving the plugin provides quicker iteration speed and a faster startup and hot reloads.
+It is only meant for local development, and the setup for it can be found inside the [/dev](./dev) directory.
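Before moving into the new plugin sources: every custom action registered in packages/backend/src/plugins/scaffolder.ts above follows the same `createTemplateAction` shape. A minimal sketch of that shape (the `cnoe:echo` id and behavior are invented purely for illustration):

```ts
import { createTemplateAction } from '@backstage/plugin-scaffolder-node';

// Minimal sketch of the custom-action shape used by the cnoe actions above.
export const createEchoAction = () =>
  createTemplateAction<{ message: string }>({
    id: 'cnoe:echo', // illustrative id, not part of this patch
    description: 'Logs a message to the scaffolder task log.',
    schema: {
      input: {
        type: 'object',
        required: ['message'],
        properties: {
          message: { title: 'Message to log', type: 'string' },
        },
      },
    },
    async handler(ctx) {
      // ctx carries the task's logger, workspace path, input, and more.
      ctx.logger.info(ctx.input.message);
    },
  });
```

Appending the returned action to the `actions` array, exactly as `cnoeActions` and `roadieUtilActions` are above, is all the registration that is needed.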
diff --git a/plugins/apache-spark/dev/index.tsx b/plugins/apache-spark/dev/index.tsx
new file mode 100644
index 0000000..5f2b474
--- /dev/null
+++ b/plugins/apache-spark/dev/index.tsx
@@ -0,0 +1,12 @@
+import React from 'react';
+import { createDevApp } from '@backstage/dev-utils';
+import { apacheSparkPlugin, ApacheSparkPage } from '../src/plugin';
+
+createDevApp()
+  .registerPlugin(apacheSparkPlugin)
+  .addPage({
+    element: <ApacheSparkPage />,
+    title: 'Root Page',
+    path: '/apache-spark'
+  })
+  .render();
diff --git a/plugins/apache-spark/package.json b/plugins/apache-spark/package.json
new file mode 100644
index 0000000..7553c7f
--- /dev/null
+++ b/plugins/apache-spark/package.json
@@ -0,0 +1,51 @@
+{
+  "name": "@internal/plugin-apache-spark",
+  "version": "0.1.0",
+  "main": "src/index.ts",
+  "types": "src/index.ts",
+  "license": "Apache-2.0",
+  "private": true,
+  "publishConfig": {
+    "access": "public",
+    "main": "dist/index.esm.js",
+    "types": "dist/index.d.ts"
+  },
+  "backstage": {
+    "role": "frontend-plugin"
+  },
+  "sideEffects": false,
+  "scripts": {
+    "start": "backstage-cli package start",
+    "build": "backstage-cli package build",
+    "lint": "backstage-cli package lint",
+    "test": "backstage-cli package test",
+    "clean": "backstage-cli package clean",
+    "prepack": "backstage-cli package prepack",
+    "postpack": "backstage-cli package postpack"
+  },
+  "dependencies": {
+    "@backstage/core-components": "^0.13.8",
+    "@backstage/core-plugin-api": "^1.8.2",
+    "@backstage/theme": "^0.5.0",
+    "@material-ui/core": "^4.9.13",
+    "@material-ui/icons": "^4.9.1",
+    "@material-ui/lab": "^4.0.0-alpha.61",
+    "react-use": "^17.2.4"
+  },
+  "peerDependencies": {
+    "react": "^16.13.1 || ^17.0.0"
+  },
+  "devDependencies": {
+    "@backstage/cli": "^0.25.1",
+    "@backstage/core-app-api": "^1.11.3",
+    "@backstage/dev-utils": "^1.0.26",
+    "@backstage/test-utils": "^1.4.7",
+    "@testing-library/jest-dom": "^5.10.1",
+    "@testing-library/react": "^12.1.3",
+    "@testing-library/user-event": "^14.0.0",
+    "msw": "^1.0.0"
+  },
+  "files": [
+    "dist"
+  ]
+}
diff --git a/plugins/apache-spark/src/api/index.test.ts b/plugins/apache-spark/src/api/index.test.ts
new file mode 100644
index 0000000..20f775b
--- /dev/null
+++ b/plugins/apache-spark/src/api/index.test.ts
@@ -0,0 +1,113 @@
+// import { ApacheSparkClient } from './index';
+// import { ApacheSpark } from './model';
+//
+// const mockKubernetesApi = {
+//   proxy: jest.fn(),
+//   getClusters: jest.fn(),
+//   getObjectsByEntity: jest.fn(),
+//   getWorkloadsByEntity: jest.fn(),
+//   getCustomObjectsByEntity: jest.fn(),
+// };
+//
+// describe('ApacheSparkClient', () => {
+//   let apacheSparkClient: ApacheSparkClient;
+//
+//   beforeEach(() => {
+//     apacheSparkClient = new ApacheSparkClient(mockKubernetesApi);
+//   });
+//
+//   afterEach(() => {
+//     jest.clearAllMocks();
+//   });
+//
+//   it('should fetch Spark application logs', async () => {
+//     mockKubernetesApi.proxy.mockResolvedValue({
+//       ok: true,
+//       text: () => {
+//         return 'logs';
+//       },
+//     });
+//     const logs = await apacheSparkClient.getLogs(
+//       'cluster1',
+//       'spark-namespace',
+//       'spark-pod-name',
+//       'abc',
+//     );
+//     expect(logs).toEqual('logs');
+//     expect(mockKubernetesApi.proxy).toHaveBeenCalledWith({
+//       clusterName: 'cluster1',
+//       path: '/api/v1/namespaces/spark-namespace/pods/spark-pod-name/log?tailLines=1000&container=abc',
+//     });
+//   });
+//
+//   it('should throw error if Spark application logs are not fetched', async () => {
+//     mockKubernetesApi.proxy.mockResolvedValueOnce({
+//       status: 500,
+//       statusText: 'Internal Server Error',
+//       ok: false,
+//       text: () => {
+//         return 'oh noes';
+//       },
+//     });
+//
+//     await expect(
+//       apacheSparkClient.getLogs(
+//         'spark-app-name',
+//         'spark-namespace',
+//         'spark-pod-name',
+//         'abc',
+//       ),
+//     ).rejects.toEqual(
+//       'failed to fetch logs: 500, Internal Server Error, oh noes',
+//     );
+//   });
+//
+//   // test getSparkApp method
+//   it('should fetch Spark application', async () => {
+//     // @ts-ignore
+//     const mockResponse: ApacheSpark = {
+//       apiVersion: 'sparkoperator.k8s.io/v1beta2',
+//       kind: 'SparkApplication',
+//       metadata: {
+//         name: 'spark-app-name',
+//         namespace: 'spark-namespace',
+//         labels: {
+//           app: 'spark-app-name',
+//         },
+//         creationTimestamp: '2021-01-01T00:00:00Z',
+//       },
+//       spec: {
+//         image: 'abc',
+//         mainApplicationFile: 'main.py',
+//         mode: 'cluster',
+//         sparkVersion: 'v3.1.1.',
+//         type: 'Python',
+//         driver: {
+//           cores: 1,
+//         },
+//         executor: {
+//           cores: 1,
+//         },
+//       },
+//       status: {
+//         applicationState: {
+//           state: 'RUNNING',
+//         },
+//       },
+//     };
+//
+//     mockKubernetesApi.proxy.mockResolvedValue({
+//       ok: true,
+//       text: () => {
+//         return JSON.stringify(mockResponse);
+//       },
+//     });
+//
+//     const application = await apacheSparkClient.getSparkApp(
+//       'spark-app-name',
+//       'spark-namespace',
+//       'abc',
+//     );
+//     expect(application).toEqual(mockResponse);
+//   });
+// });
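The whole spec above ships commented out. If it is ever revived, a trimmed version needs nothing more than a stubbed `KubernetesApi`. A sketch assuming jest, with the mock shape mirroring the commented code:

```ts
import { ApacheSparkClient } from './index';

// Only `proxy` matters for getLogs; the rest are inert stubs.
const kubernetesApi = {
  proxy: jest.fn().mockResolvedValue({ ok: true, text: async () => 'logs' }),
  getClusters: jest.fn(),
  getObjectsByEntity: jest.fn(),
  getWorkloadsByEntity: jest.fn(),
  getCustomObjectsByEntity: jest.fn(),
};

it('fetches driver logs through the cluster proxy', async () => {
  const client = new ApacheSparkClient(kubernetesApi as any);
  await expect(
    client.getLogs('cluster1', 'spark-ns', 'driver-pod', 'spark-kubernetes-driver'),
  ).resolves.toBe('logs');
});
```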
diff --git a/plugins/apache-spark/src/api/index.ts b/plugins/apache-spark/src/api/index.ts
new file mode 100644
index 0000000..cda9454
--- /dev/null
+++ b/plugins/apache-spark/src/api/index.ts
@@ -0,0 +1,176 @@
+import { createApiRef } from '@backstage/core-plugin-api';
+import { ApacheSpark, ApacheSparkList, Pod } from './model';
+import { KubernetesApi } from '@backstage/plugin-kubernetes';
+
+export const apacheSparkApiRef = createApiRef<ApacheSparkApi>({
+  id: 'plugin.apachespark',
+});
+
+const API_VERSION = 'sparkoperator.k8s.io/v1beta2';
+const SPARK_APP_PLURAL = 'sparkapplications';
+const K8s_API_TIMEOUT = 'timeoutSeconds';
+
+export interface ApacheSparkApi {
+  getSparkApps(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    labels: string | undefined,
+  ): Promise<ApacheSparkList>;
+
+  getSparkApp(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    name: string,
+  ): Promise<ApacheSpark>;
+
+  getLogs(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+    containerName?: string | undefined,
+    tailLine?: number,
+  ): Promise<string>;
+
+  getContainers(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+  ): Promise<string[]>;
+}
+
+export class ApacheSparkClient implements ApacheSparkApi {
+  private kubernetesApi: KubernetesApi;
+  constructor(kubernetesApi: KubernetesApi) {
+    this.kubernetesApi = kubernetesApi;
+  }
+  async getSparkApps(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    labels: string | undefined,
+  ): Promise<ApacheSparkList> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}`;
+    const query = new URLSearchParams({
+      [K8s_API_TIMEOUT]: '30',
+    });
+    if (labels) {
+      query.set('labelSelector', labels);
+    }
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
+      path: `${path}?${query.toString()}`,
+    });
+
+    if (!resp.ok) {
+      return Promise.reject(
+        `failed to fetch resources: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    const out = JSON.parse(await resp.text());
+    this.removeManagedField(out);
+    return out;
+  }
+
+  async getSparkApp(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    name: string,
+  ): Promise<ApacheSpark> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/apis/${API_VERSION}/namespaces/${ns}/${SPARK_APP_PLURAL}/${name}`;
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
+      path: `${path}`,
+    });
+    if (!resp.ok) {
+      return Promise.reject(
+        `failed to fetch resources: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    const out = JSON.parse(await resp.text());
+    this.removeManagedField(out);
+    return out;
+  }
+
+  async getLogs(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+    containerName: string | undefined,
+    tailLine: number = 1000,
+  ): Promise<string> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/api/v1/namespaces/${ns}/pods/${podName}/log`;
+    const query = new URLSearchParams({
+      tailLines: tailLine.toString(),
+    });
+    if (containerName) {
+      query.set('container', containerName);
+    }
+
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
+      path: `${path}?${query.toString()}`,
+    });
+    if (!resp.ok) {
+      return Promise.reject(
+        `failed to fetch logs: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    return resp.text();
+  }
+
+  async getContainers(
+    clusterName: string | undefined,
+    namespace: string | undefined,
+    podName: string,
+  ): Promise<string[]> {
+    const ns = namespace !== undefined ? namespace : 'default';
+    const path = `/api/v1/namespaces/${ns}/pods/${podName}`;
+    const query = new URLSearchParams({
+      [K8s_API_TIMEOUT]: '30',
+    });
+    const resp = await this.kubernetesApi.proxy({
+      clusterName:
+        clusterName !== undefined ? clusterName : await this.getFirstCluster(),
+      path: `${path}?${query.toString()}`,
+    });
+    if (!resp.ok) {
+      throw new Error(
+        `failed to fetch logs: ${resp.status}, ${
+          resp.statusText
+        }, ${await resp.text()}`,
+      );
+    }
+    const pod = JSON.parse(await resp.text()) as Pod;
+    return pod.spec.containers.map(c => c.name);
+  }
+
+  async getFirstCluster(): Promise<string> {
+    const clusters = await this.kubernetesApi.getClusters();
+    if (clusters.length > 0) {
+      return Promise.resolve(clusters[0].name);
+    }
+    return Promise.reject('no clusters found in configuration');
+  }
+
+  removeManagedField(spark: any) {
+    if (spark.metadata?.hasOwnProperty('managedFields')) {
+      delete spark.metadata.managedFields;
+    }
+    if (spark.items) {
+      for (const i of spark.items) {
+        this.removeManagedField(i);
+      }
+    }
+  }
+}
diff --git a/plugins/apache-spark/src/api/model.ts b/plugins/apache-spark/src/api/model.ts
new file mode 100644
index 0000000..1d6455c
--- /dev/null
+++ b/plugins/apache-spark/src/api/model.ts
@@ -0,0 +1,100 @@
+export type Metadata = {
+  name: string;
+  namespace?: string;
+  labels?: Record<string, string>;
+  annotations?: Record<string, string>;
+  creationTimestamp: string;
+  managedFields?: any;
+};
+
+export type Spec = {
+  arguments?: string[];
+  batchScheduler?: string;
+  driver: {
+    coreLimit?: string;
+    coreRequest?: string;
+    cores?: number;
+    gpu?: {
+      name: string;
+      quantity: number;
+    };
+    labels?: Record<string, string>;
+    memory?: string;
+    memoryOverhead?: string;
+    podName?: string;
+    schedulerName?: string;
+    serviceAccount?: string;
+  };
+  executor: {
+    coreLimit?: string;
+    coreRequest?: string;
+    cores?: number;
+    gpu?: {
+      name: string;
+      quantity: number;
+    };
+    instances?: number;
+    labels?: Record<string, string>;
+    memory?: string;
+    memoryOverhead?: string;
+    schedulerName?: string;
+    serviceAccount?: string;
+  };
+  image: string;
+  mainClass?: string;
+  mainApplicationFile?: string;
+  mode: string;
+  pythonVersion?: string;
+  sparkVersion: string;
+  type: string;
+};
+
+export type Status = {
+  applicationState: {
+    errorMessage?: string;
+    state: string;
+  };
+  driverInfo?: {
+    podName: string;
+    webUIAddress: string;
+    webUIIngressAddress: string;
+    webUIIngressName: string;
+    webUIPort: string;
+    webUIServiceName: string;
+  };
+  executionAttempts?: number;
+  executorState?: { [key: string]: string };
+  lastSubmissionAttemptTime?: string;
+  sparkApplicationId?: string;
+  submissionAttempts?: number;
+  submissionID?: string;
+  terminationTime?: string;
+};
+
+export type ApacheSpark = {
+  apiVersion: string;
+  kind: string;
+  metadata: Metadata;
+  spec: Spec;
+  status: Status;
+};
+
+export type ApacheSparkList = {
+  apiVersion: string;
+  kind: string;
+  items?: ApacheSpark[];
+};
+
+export type Pod = {
+  apiVersion: string;
+  kind: string;
+  metadata: Metadata;
+  spec: PodSpec;
+};
+
+export type PodSpec = {
+  containers: {
+    image: string;
+    name: string;
+  }[];
+};
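One piece the diff does not show is how `apacheSparkApiRef` gets bound to `ApacheSparkClient`; in a Backstage frontend plugin that wiring conventionally lives in src/plugin.ts. A sketch of the expected factory, assuming `kubernetesApiRef` is exported from the same package this plugin already imports `KubernetesApi` from:

```ts
import { createApiFactory } from '@backstage/core-plugin-api';
import { kubernetesApiRef } from '@backstage/plugin-kubernetes';
import { apacheSparkApiRef, ApacheSparkClient } from './api';

// Conventional wiring sketch: resolve the kubernetes API dependency and
// hand it to the client, so useApi(apacheSparkApiRef) works in components.
export const apacheSparkApiFactory = createApiFactory({
  api: apacheSparkApiRef,
  deps: { kubernetesApi: kubernetesApiRef },
  factory: ({ kubernetesApi }) => new ApacheSparkClient(kubernetesApi),
});
```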
diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx
new file mode 100644
index 0000000..01d3ade
--- /dev/null
+++ b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.test.tsx
@@ -0,0 +1,83 @@
+import React from 'react';
+import { render, screen } from '@testing-library/react';
+import { useApi } from '@backstage/core-plugin-api';
+import { useEntity } from '@backstage/plugin-catalog-react';
+import useAsync from 'react-use/lib/useAsync';
+import { ApacheSpark } from '../../api/model';
+import { ApacheSparkDriverLogs } from './ApacheSparkLogs';
+import {
+  APACHE_SPARK_LABEL_SELECTOR_ANNOTATION,
+  CLUSTER_NAME_ANNOTATION,
+  K8S_NAMESPACE_ANNOTATION,
+} from '../../consts';
+
+jest.mock('@backstage/core-plugin-api');
+jest.mock('react-use/lib/useAsync');
+jest.mock('@backstage/plugin-catalog-react');
+
+jest.mock('@backstage/core-components', () => ({
+  LogViewer: (props: { text: string }) => {
+    return <div>{props.text}</div>;
+  },
+}));
+
+describe('ApacheSparkDriverLogs', () => {
+  const mockUseApi = useApi as jest.MockedFunction<typeof useApi>;
+  const mockUseAsync = useAsync as jest.MockedFunction<typeof useAsync>;
+  const mockUseEntity = useEntity as jest.MockedFunction<typeof useEntity>;
+  const mockGetLogs = jest.fn();
+  const mockSparkApp = {
+    status: {
+      driverInfo: {
+        podName: 'test-pod',
+      },
+    },
+  } as ApacheSpark;
+
+  beforeEach(() => {
+    mockUseApi.mockReturnValue({
+      getLogs: mockGetLogs,
+    });
+    mockUseEntity.mockReturnValue({
+      entity: {
+        apiVersion: 'version',
+        kind: 'kind',
+        metadata: {
+          name: 'name',
+          namespace: 'ns1',
+          annotations: {
+            [K8S_NAMESPACE_ANNOTATION]: 'k8s-ns',
+            [CLUSTER_NAME_ANNOTATION]: 'my-cluster',
+            [APACHE_SPARK_LABEL_SELECTOR_ANNOTATION]: 'env=test',
+          },
+        },
+      },
+    });
+  });
+
+  afterEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should render error message if there is an error', () => {
+    mockUseAsync.mockReturnValue({
+      value: undefined,
+      loading: false,
+      error: new Error('Test error'),
+    });
+
+    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
+    expect(screen.getByText('Error: Test error')).toBeInTheDocument();
+    expect(screen.getByRole('alert')).toBeInTheDocument();
+  });
+
+  it('should render the log viewer with the fetched logs', async () => {
+    mockUseAsync.mockReturnValue({
+      value: 'test logs',
+      loading: false,
+      error: undefined,
+    });
+    render(<ApacheSparkDriverLogs sparkApp={mockSparkApp} />);
+    expect(screen.getByText('test logs')).toBeInTheDocument();
+  });
+});
diff --git a/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
new file mode 100644
index 0000000..e892856
--- /dev/null
+++ b/plugins/apache-spark/src/components/ApacheSparkLogs/ApacheSparkLogs.tsx
@@ -0,0 +1,100 @@
+import { useApi } from '@backstage/core-plugin-api';
+import { apacheSparkApiRef } from '../../api';
+import useAsync from 'react-use/lib/useAsync';
+import { ApacheSpark } from '../../api/model';
+import {
+  LogViewer,
+  Progress,
+  Select,
+  SelectedItems,
+  SelectItem,
+} from '@backstage/core-components';
+import Alert from '@material-ui/lab/Alert';
+import React, { useEffect, useState } from 'react';
+import { useEntity } from '@backstage/plugin-catalog-react';
+import { getAnnotationValues } from '../utils';
+
+export const ApacheSparkDriverLogs = (props: { sparkApp: ApacheSpark }) => {
+  const apiClient = useApi(apacheSparkApiRef);
+  const { entity } = useEntity();
+  const { ns, clusterName } = getAnnotationValues(entity);
+
+  const { value, loading, error } = useAsync(async (): Promise<string> => {
+    return await apiClient.getLogs(
+      clusterName,
+      ns,
+      props.sparkApp.status.driverInfo?.podName!,
+      'spark-kubernetes-driver',
+    );
+  }, [props]);
+  if (loading) {
+    return <Progress />;
+  } else if (error) {
+    return <Alert severity="error">{`${error}`}</Alert>;
+  }
+  return <LogViewer text={value!} />;
+};
+
+const ExecutorLogs = (props: { name: string }) => {
+  const apiClient = useApi(apacheSparkApiRef);
+  const { entity } = useEntity();
+  const [logs, setLogs] = useState('');
+  const { ns, clusterName } = getAnnotationValues(entity);
+
+  useEffect(() => {
+    async function getLogs() {
+      try {
+        const val = await apiClient.getLogs(
+          clusterName,
+          ns,
+          props.name,
+          'spark-kubernetes-executor',
+        );
+        setLogs(val);
+      } catch (e) {
+        if (typeof e === 'string') {
+          setLogs(e);
+        }
+      }
+    }
+    if (props.name !== '') {
+      getLogs();
+    }
+  }, [apiClient, clusterName, ns, props]);
+
+  return <LogViewer text={logs} />;
+};
+
+export const ApacheSparkExecutorLogs = (props: { sparkApp: ApacheSpark }) => {
+  const [selected, setSelected] = useState('');
+  if (props.sparkApp.status.applicationState.state !== 'RUNNING') {
+    return (
+      <Alert severity="info">
+        Executor logs are only available for Spark Applications in RUNNING state
+      </Alert>
+    );
+  }
+  const executors: SelectItem[] = [{ label: '', value: '' }];
+  for (const key in props.sparkApp.status.executorState) {
+    if (props.sparkApp.status.executorState.hasOwnProperty(key)) {
+      executors.push({ label: key, value: key });
+    }
+  }
+
+  const handleChange = (item: SelectedItems) => {
+    if (typeof item === 'string' && item !== '') {
+      setSelected(item);
+    }
+  };
+  return (
+    <>