Compare commits
No commits in common. "development" and "feature/argo-workflows" have entirely different histories.
76 changed files with 33431 additions and 40575 deletions
@@ -1,6 +1,10 @@
dist-types
.git
.yarn/cache
.yarn/install-state.gz
node_modules
packages/*/dist
packages/*/src
packages/*/node_modules
plugins/*/dist
plugins/*/node_modules
plugins
*.local.yaml
github-integration.yaml
k8s-config.yaml

@@ -1 +0,0 @@
playwright.config.ts
51  .github/workflows/build-and-push.yaml  vendored
@@ -1,51 +0,0 @@
name: ci

on: push

jobs:
  build:
    runs-on: ubuntu-22.04

    steps:
      -
        name: Repository meta
        id: repository
        run: |
          registry=${{ github.server_url }}
          registry=${registry##http*://}
          echo "registry=${registry}" >> "$GITHUB_OUTPUT"
          echo "registry=${registry}"
          repository="$(echo "${{ github.repository }}" | tr '[:upper:]' '[:lower:]')"
          echo "repository=${repository}" >> "$GITHUB_OUTPUT"
          echo "repository=${repository}"
      -
        name: Docker meta
        uses: docker/metadata-action@v5
        id: docker
        with:
          images: ${{ steps.repository.outputs.registry }}/${{ steps.repository.outputs.repository }}
      -
        name: Login to registry
        uses: docker/login-action@v3
        with:
          registry: ${{ steps.repository.outputs.registry }}
          username: ${{ secrets.PACKAGES_USER }}
          password: ${{ secrets.PACKAGES_TOKEN }}
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          buildkitd-flags: '--allow-insecure-entitlement network.host'
          driver-opts: network=host
      -
        name: Build and push
        uses: docker/build-push-action@v6
        with:
          push: true
          allow: network.host
          network: host
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.docker.outputs.tags }}
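The deleted workflow above builds and pushes a multi-architecture image with Buildx. For reference, a rough local equivalent is sketched below; the registry host and image name are placeholders, not values taken from this repository.

```bash
# Log in to the target container registry first (placeholder host).
docker login registry.example.org

# Build for the same platforms the workflow targets and push in one step.
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --tag registry.example.org/devfw-cicd/backstage-edp:latest \
  --push \
  .
```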
4  .gitignore  vendored
@@ -50,5 +50,5 @@ site
# vscode database functionality support files
*.session.sql

# E2E test reports
e2e-test-report/
# JetBrains
.idea

@@ -1 +0,0 @@
nodeLinker: node-modules
94  Dockerfile
@@ -1,94 +0,0 @@
# Stage 1 - Create yarn install skeleton layer
FROM node:20.18.1 AS packages

WORKDIR /app
COPY package.json yarn.lock ./

COPY packages packages

# Comment this out if you don't have any internal plugins
COPY plugins plugins

RUN find packages \! -name "package.json" -mindepth 2 -maxdepth 2 -exec rm -rf {} \+

# Stage 2 - Install dependencies and build packages
FROM node:20.18.1 AS build

# Required for arm64
RUN apt update -y
RUN apt install -y python3 make gcc build-essential bash

USER node
WORKDIR /app

COPY --from=packages --chown=node:node /app .

RUN --mount=type=cache,target=/home/node/.cache/yarn,sharing=locked,uid=1000,gid=1000 \
    yarn install --network-timeout 600000

COPY --chown=node:node . .

RUN yarn tsc
RUN yarn --cwd packages/backend build
# If you have not yet migrated to package roles, use the following command instead:
# RUN yarn --cwd packages/backend backstage-cli backend:bundle --build-dependencies

RUN mkdir packages/backend/dist/skeleton packages/backend/dist/bundle \
    && tar xzf packages/backend/dist/skeleton.tar.gz -C packages/backend/dist/skeleton \
    && tar xzf packages/backend/dist/bundle.tar.gz -C packages/backend/dist/bundle

# Stage 3 - Build the actual backend image and install production dependencies
FROM node:20.18.1

# Install isolate-vm dependencies, these are needed by the @backstage/plugin-scaffolder-backend.
# Install packages needed to get utility binaries
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && \
    apt-get install -y --no-install-recommends python3 python3-pip python3-venv g++ build-essential ca-certificates curl

RUN yarn config set python /usr/bin/python3

# Add kubectl for the kube apply plugin.
# Add mkdocs for the TechDocs plugin.
RUN if test "$(uname -m)" = "x86_64"; \
    then \
      curl -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/v1.29.9/bin/linux/amd64/kubectl; \
    fi
RUN if test "$(uname -m)" != "x86_64"; \
    then \
      curl -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/v1.29.9/bin/linux/arm64/kubectl; \
    fi
RUN chmod +x /usr/local/bin/kubectl

ENV VIRTUAL_ENV=/opt/venv
RUN python3 -m venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN pip3 install 'mkdocs-techdocs-core==1.4.2' 'mkdocs-awesome-pages-plugin==2.10.1'

# From here on we use the least-privileged `node` user to run the backend.
USER node

# This should create the app dir as `node`.
# If it is instead created as `root` then the `tar` command below will
# fail: `can't create directory 'packages/': Permission denied`.
# If this occurs, then ensure BuildKit is enabled (`DOCKER_BUILDKIT=1`)
# so the app dir is correctly created as `node`.
WORKDIR /app

# Copy the install dependencies from the build stage and context
COPY --from=build --chown=node:node /app/yarn.lock /app/package.json /app/packages/backend/dist/skeleton/ ./

RUN --mount=type=cache,target=/home/node/.cache/yarn,sharing=locked,uid=1000,gid=1000 \
    yarn install --production --network-timeout 600000

# Copy the built packages from the build stage
COPY --from=build --chown=node:node /app/packages/backend/dist/bundle/ ./

# Copy any other files that we need at runtime
COPY --chown=node:node app-config.yaml ./

# This switches many Node.js dependencies to production mode.
ENV NODE_ENV production

CMD ["node", "packages/backend", "--config", "app-config.yaml"]
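The comments in the final stage above point out that BuildKit must be enabled for the skeleton extraction to succeed. A minimal sketch of building and running this image locally follows; the image tag is illustrative, and 7007 as the backend port is an assumption rather than something stated in this repository.

```bash
# Build with BuildKit enabled so /app is created as the node user (see the comments above).
DOCKER_BUILDKIT=1 docker build -t backstage-edp:local .

# Run the bundled backend with the copied app-config.yaml (port mapping is an assumption).
docker run --rm -p 7007:7007 backstage-edp:local
```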
116  README.md
@@ -1,116 +1,10 @@
# EDP Backstage
# [Backstage](https://backstage.io)

The EDP bespoke version of Backstage.
This is your newly scaffolded Backstage App, Good Luck!

With respect to the CNOE stack (where eDF originates from), it is comparable to https://github.com/cnoe-io/backstage-app
To start the app, run:

At the time of writing, CNOE-backstage-app is "version": "1.28.4"

## Container Images

Container images are pushed to the Cefor Container Registry and available [here](https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/DevFW-CICD/-/packages/container/backstage-edp/).


## Local Development

Use of [**edpbuilder**](https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/DevFW/edpbuilder.git) is recommended for local setup.

### Create your local cluster

Once edpbuilder is installed on your computer, create a stack that you are interested in. For example:

> Hint: From here on this is the old CNOE README ... no guarantee that this works as described!

### Update Backstage application config

Once all ArgoCD applications are healthy, you need to update a few fields in the [app-config.yaml](./app-config.yaml) file.

#### Update control plane URL

The control plane port must be updated every time a cluster is created. Run the `kubectl cluster-info` command to get the control plane URL. Once you have your URL, update your `app-config.yaml` file at [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L122).

For example:

```bash
$ kubectl cluster-info

Kubernetes control plane is running at https://127.0.0.1:36463
CoreDNS is running at https://127.0.0.1:36463/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy
```

For this particular example output, the `https://127.0.0.1:36463` above is the URL you need to use in your `app-config.yaml`.

#### Update service account token

Since tokens are generated each time the backstage service account is created, you need to update this value as well. The command to retrieve the service account token is:

`kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token`

Copy the token value and update the app-config file at [this line](https://github.com/cnoe-io/backstage-app/blob/main/app-config.yaml#L127).

For example:

```bash
$ kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token

eyJhbGciOiJSUzI1NiIsImtpZCI6IkRxbDRCSnNicjFwekFqdmxwNDc5MHJqeUlFSjhxNHU0LV95OC1s...
```

If you do not want to place the token value in your file, you can use environment variables instead:
1. Set [this line](https://github.com/cnoe-io/backstage-app/blob/main/app-config.yaml#L127) value to be `${BACKSTAGE_SA_TOKEN}`.
2. Then export the token value:
```bash
export BACKSTAGE_SA_TOKEN=$(kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token)
```

#### Update ArgoCD token

ArgoCD admin passwords are generated on each fresh installation. You need to update the configuration file accordingly. To obtain your password, run: `./idpbuilder get secrets -p argocd`. Then update [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L136)

For example:

```bash
$ ./idpbuilder get secrets -p argocd

---------------------------
Name: argocd-initial-admin-secret
Namespace: argocd
Data:
password : abc
username : admin
```

#### Update Gitea Credentials

Gitea admin passwords are generated on each fresh installation as well. To obtain your password, run: `./idpbuilder get secrets -p gitea`.
Then update [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L40) and [this line](https://github.com/cnoe-io/backstage-app/blob/9ee3514e51c1a354b7fe85a90117faf8328bfa0b/app-config.yaml#L44).

For example:

```bash
$ ./idpbuilder get secrets -p gitea

---------------------------
Name: gitea-credential
Namespace: gitea
Data:
password : abc
username : giteaAdmin
```

### Start Backstage processes

Once the `app-config.yaml` file is updated, you are ready to start your Backstage instance. For development purposes, using two terminal windows or tabs is recommended. You can also run them through your favorite IDE.

In the first terminal tab, install dependencies and start the backend.

```bash
```sh
yarn install
yarn run start-backend
```

In the second terminal tab, run the frontend.

```bash
yarn run start
yarn dev
```
@@ -30,6 +30,6 @@ backend:

catalog:
  # Overrides the default list locations from app-config.yaml as these contain example data.
  # See https://backstage.io/docs/features/software-catalog/#adding-components-to-the-catalog for more details
  # See https://backstage.io/docs/features/software-catalog/software-catalog-overview#adding-components-to-the-catalog for more details
  # on how to get entities into the catalog.
  locations: []
108  app-config.yaml
@@ -1,13 +1,13 @@
app:
  title: CNOE
  title: Scaffolded Backstage App
  baseUrl: http://localhost:3000

organization:
  name: CNOE
  name: My Company

backend:
  # Used for enabling authentication, secret is shared by all backend plugins
  # See https://backstage.io/docs/auth/service-to-service-auth for
  # See https://backstage.io/docs/tutorials/backend-to-backend-auth for
  # information on the format
  # auth:
  #   keys:

@@ -30,25 +30,30 @@ backend:
  database:
    client: better-sqlite3
    connection: ':memory:'
  cache:
    store: memory
  # workingDirectory: /tmp # Use this to configure a working directory for the scaffolder, defaults to the OS temp-dir

integrations:
  gitea:
    - baseUrl: https://cnoe.localtest.me:8443/gitea
      host: cnoe.localtest.me:8443
      username: giteaAdmin
      password: ${GITEA_PASSWORD}
    - baseUrl: https://cnoe.localtest.me/gitea
      host: cnoe.localtest.me
      username: giteaAdmin
      password: ${GITEA_PASSWORD}
  github:
    - host: github.com
      apps:
        - $include: github-integration.yaml
  # - host: github.com
  #   # This is a Personal Access Token or PAT from GitHub. You can find out how to generate this token, and more information
  #   # about setting up the GitHub integration here: https://backstage.io/docs/getting-started/configuration#setting-up-a-github-integration
  #   token: ${GITHUB_TOKEN}
  ### Example for how to add your GitHub Enterprise instance using the API:
  # - host: ghe.example.net
  #   apiBaseUrl: https://ghe.example.net/api/v3
  #   token: ${GHE_TOKEN}

proxy:
  ### Example for how to add a proxy endpoint for the frontend.
  ### A typical reason to do this is to handle HTTPS and CORS for internal services.
  # endpoints:
  #   '/test':
  #     target: 'https://example.com'
  #     changeOrigin: true
  # '/test':
  #   target: 'https://example.com'
  #   changeOrigin: true

# Reference documentation http://backstage.io/docs/features/techdocs/configuration
# Note: After experimenting with basic setup, use CI/CD to generate docs

@@ -60,46 +65,39 @@ techdocs:
    runIn: 'docker' # Alternatives - 'local'
  publisher:
    type: 'local' # Alternatives - 'googleGcs' or 'awsS3'. Read documentation for using alternatives.

auth:
  # see https://backstage.io/docs/auth/ to learn about auth providers
  environment: local # set this to development to enable SSO
  session:
    secret: abcdfkjalskdfjkla
  providers:
    guest: {}
    keycloak-oidc:
      development:
        metadataUrl: https://cnoe.localtest.me:8443/keycloak/realms/cnoe/.well-known/openid-configuration
        clientId: backstage
        clientSecret: ${KEYCLOAK_CLIENT_SECRET}
        prompt: auto
  providers: {}

scaffolder:
  # see https://backstage.io/docs/features/software-templates/configuration for software template options
  defaultAuthor:
    name: backstage-scaffolder
    email: noreply
  defaultCommitMessage: "backstage scaffolder"

catalog:
  import:
    entityFilename: catalog-info.yaml
    pullRequestBranchName: backstage-integration
  rules:
    - allow: [ Component, System, API, Resource, Location, Template ]
    - allow: [Component, System, API, Resource, Location, Template]
  locations:
    - type: url
      target: https://cnoe.localtest.me:8443/gitea/giteaAdmin/idpbuilder-localdev-backstage-templates-entities/src/branch/main/catalog-info.yaml
    # # Local example template
    # - type: file
    #   target: ../../examples/template/template.yaml
    #   rules:
    #     - allow: [Template]
    #
    # # Local example organizational data
    # - type: file
    #   target: ../../examples/org.yaml
    #   rules:
    #     - allow: [User, Group]
    # Local example data, file locations are relative to the backend process, typically `packages/backend`
    - type: file
      target: ../../examples/entities.yaml
    - type: file
      target: /Users/mccloman/repos/backstage-templates/template1.yaml
    - type: file
      target: /Users/mccloman/repos/backstage-app/backstage/test-template.yaml
    # Local example template
    - type: file
      target: ../../examples/template/template.yaml
      rules:
        - allow: [Template]

    # Local example organizational data
    - type: file
      target: ../../examples/org.yaml
      rules:
        - allow: [User, Group]

    ## Uncomment these lines to add more example data
    # - type: url

@@ -114,22 +112,4 @@ kubernetes:
  serviceLocatorMethod:
    type: 'multiTenant'
  clusterLocatorMethods:
    - type: 'config'
      clusters:
        - url: https://127.0.0.1:33277 # you may need to change this
          name: local
          authProvider: 'serviceAccount'
          skipTLSVerify: true
          # replace with your own service account token value. e.g. kubectl -n backstage exec -it deploy/backstage -- cat /var/run/secrets/kubernetes.io/serviceaccount/token
          serviceAccountToken: eyJhbG......
argocd:
  appLocatorMethods:
    - type: 'config'
      instances:
        - name: local
          url: https://cnoe.localtest.me:8443/argocd
          username: admin
          # replace with your argocd password e.g. kubectl -n argocd get secret argocd-initial-admin-secret -o jsonpath="{.data.password}" | base64 -d
          password: ${ARGOCD_ADMIN_PASSWORD}
argoWorkflows:
  baseUrl: https://cnoe.localtest.me:8443/argo-workflows
        - $include: k8s-config.yaml
@@ -1,3 +1,3 @@
{
  "version": "1.36.1"
  "version": "1.14.2"
}

@@ -1,7 +1,7 @@
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
  name: backstage-idpbuilder
  name: backstage
  description: An example of a Backstage application.
  # Example for optional annotations
  # annotations:
@@ -1,35 +0,0 @@
#!/bin/bash

SERVICE_ACCOUNT_DIR="/var/run/secrets/kubernetes.io/serviceaccount"
KUBERNETES_SERVICE_SCHEME=$(case $KUBERNETES_SERVICE_PORT in 80|8080|8081) echo "http";; *) echo "https"; esac)
KUBERNETES_SERVER_URL="$KUBERNETES_SERVICE_SCHEME"://"$KUBERNETES_SERVICE_HOST":"$KUBERNETES_SERVICE_PORT"
KUBERNETES_CLUSTER_CA_FILE="$SERVICE_ACCOUNT_DIR"/ca.crt
KUBERNETES_NAMESPACE=$(cat "$SERVICE_ACCOUNT_DIR"/namespace)
KUBERNETES_USER_TOKEN=$(cat "$SERVICE_ACCOUNT_DIR"/token)
KUBERNETES_CONTEXT="inCluster"

rm -rf "$HOME"/.kube
mkdir -p "$HOME"/.kube
cat << EOF > "$HOME"/.kube/config
apiVersion: v1
kind: Config
preferences: {}
current-context: $KUBERNETES_CONTEXT
clusters:
- cluster:
    server: $KUBERNETES_SERVER_URL
    certificate-authority: $KUBERNETES_CLUSTER_CA_FILE
  name: inCluster
users:
- name: podServiceAccount
  user:
    token: $KUBERNETES_USER_TOKEN
contexts:
- context:
    cluster: inCluster
    user: podServiceAccount
    namespace: $KUBERNETES_NAMESPACE
  name: $KUBERNETES_CONTEXT
EOF

cnoe-cli "$@"
@@ -1,17 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: game-demo
data:
  # property-like keys; each key maps to a simple value
  player_initial_lives: "3"
  ui_properties_file_name: "user-interface.properties"

  # file-like keys
  game.properties: |
    enemy.types=aliens,monsters
    player.maximum-lives=5
  user-interface.properties: |
    color.good=purple
    color.bad=yellow
    allow.textmode=true

@@ -1,41 +0,0 @@
apiVersion: scaffolder.backstage.io/v1beta3
kind: Template
metadata:
  name: deploy-resources-object
  title: Deploy Resources using object
  description: Deploy Resource to Kubernetes
spec:
  owner: guest
  type: service
  # these are the steps which are rendered in the frontend with the form input
  parameters: []
  steps:
    - id: template
      name: Generating component
      action: fetch:template
      input:
        url: ./skeleton
    - id: apply
      name: apply-manifest
      action: cnoe:kubernetes:apply
      input:
        namespaced: true
        manifestObject:
          apiVersion: v1
          kind: ConfigMap
          metadata:
            name: game-demo
          data:
            # property-like keys; each key maps to a simple value
            player_initial_lives: "3"
            ui_properties_file_name: "user-interface.properties"

            # file-like keys
            game.properties: |
              enemy.types=aliens,monsters
              player.maximum-lives=5
            user-interface.properties: |
              color.good=purple
              color.bad=yellow
              allow.textmode=true
        clusterName: local

@@ -1,41 +0,0 @@
apiVersion: scaffolder.backstage.io/v1beta3
kind: Template
metadata:
  name: deploy-resources-string
  title: Deploy Resources using literal string
  description: Deploy Resource to Kubernetes
spec:
  owner: guest
  type: service
  # these are the steps which are rendered in the frontend with the form input
  parameters: []
  steps:
    - id: template
      name: Generating component
      action: fetch:template
      input:
        url: ./skeleton
    - id: apply
      name: apply-manifest
      action: cnoe:kubernetes:apply
      input:
        namespaced: true
        manifestString: |
          apiVersion: v1
          kind: ConfigMap
          metadata:
            name: game-demo
          data:
            # property-like keys; each key maps to a simple value
            player_initial_lives: "3"
            ui_properties_file_name: "user-interface.properties"

            # file-like keys
            game.properties: |
              enemy.types=aliens,monsters
              player.maximum-lives=5
            user-interface.properties: |
              color.good=purple
              color.bad=yellow
              allow.textmode=true
        clusterName: local

@@ -1,30 +0,0 @@
apiVersion: scaffolder.backstage.io/v1beta3
kind: Template
metadata:
  name: deploy-resources
  title: Deploy Resources
  description: Deploy Resource to Kubernetes
spec:
  owner: guest
  type: service
  # these are the steps which are rendered in the frontend with the form input
  parameters:
    - title: file name
      properties:
        path:
          type: string
          description: file name
          default: cm.yaml
  steps:
    - id: template
      name: Generating component
      action: fetch:template
      input:
        url: ./skeleton
    - id: apply
      name: apply-manifest
      action: cnoe:kubernetes:apply
      input:
        namespaced: true
        manifestPath: cm.yaml
        clusterName: local

9  github-integration.yaml  Normal file
@@ -0,0 +1,9 @@
appId: 123456
webhookUrl: https://somehwere
clientId: some.id
clientSecret: ""
webhookSecret: ""
privateKey: |
  -----BEGIN RSA PRIVATE KEY-----

  -----END RSA PRIVATE KEY-----

16  k8s-config.yaml  Normal file
@@ -0,0 +1,16 @@
type: 'config'
clusters:
  - url: https://3CEBA3CA7870A3E5BFE2CF3FA173EE56.gr7.us-west-2.eks.amazonaws.com:443
    name: canoe-packaging
    authProvider: 'serviceAccount'
    skipTLSVerify: false
    skipMetricsLookup: true
    serviceAccountToken: ""
    # dashboardUrl: http://127.0.0.1:64713 # url copied from running the command: minikube service kubernetes-dashboard -n kubernetes-dashboard
    # dashboardApp: standard
caData: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMvakNDQWVhZ0F3SUJBZ0lCQURBTkJna3Foa2lHOXcwQkFRc0ZBREFWTVJNd0VRWURWUVFERXdwcmRXSmwKY201bGRHVnpNQjRYRFRJek1ERXlOVEl3TkRBMU5Wb1hEVE16TURFeU1qSXdOREExTlZvd0ZURVRNQkVHQTFVRQpBeE1LYTNWaVpYSnVaWFJsY3pDQ0FTSXdEUVlKS29aSWh2Y05BUUVCQlFBRGdnRVBBRENDQVFvQ2dnRUJBTlJvCnU5dkl6cjZmVEk4RThyR0Q2RHNoLzhyK0lkWmFHZGxsUytKbDN0Q2JteTVYUU15NnpOMU5acG1zRHpDTC9nUlIKS0s5WTVhUmRUWjFLdklkekRMQXdMeXpqODk5clJtYjB2aXUzR0ZQdDcxSWFYMEp1VmQwaTBrQit5Y01jSFo2QgpjOGhmMUErM1I2VVpCZDZsaUx0dG5pUjZwb29oYXdobG5DSEN4L1oyd014YWEvU21SUWxDMjhhTEhLZC9ZU0s2CndXS1VOQmVTMmpGZGc5bVVkcnJDREx5MkxqUTNUcUtPVW9PNEQ3bm9rVTh1NUFtejhldWFxdzR4U25ZMExucmsKWVk1MmhvOW5qRnZwOE5WQnE1VjRPUFVXaEhvQXE4TnZjZlVITkNSdWZkN09FZG85Y2t1Q1B3VzFiZWxNOW9oeApURFAvWFlsS09INFVQTDFHeUJFQ0F3RUFBYU5aTUZjd0RnWURWUjBQQVFIL0JBUURBZ0trTUE4R0ExVWRFd0VCCi93UUZNQU1CQWY4d0hRWURWUjBPQkJZRUZOeUgrRTZxb2VMTlVEVkl4ZXpTSjk3STRoZytNQlVHQTFVZEVRUU8KTUF5Q0NtdDFZbVZ5Ym1WMFpYTXdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBRDVoeStNaDBRdHJ6dG5vV0tFRgpTaFFsanE1cjJGYUZablAyYU9OWS9uaHNxdThjSmZkbWFyQUtsR1JkRTBocnVoaGprdE55ckdmcEZ5d1ErR0hhClR4d0N6NW9uUEhYaTRNZnBadEpaNzZYSERtT3BFR2diSTFhL0VCQUV2YkxHSVRWT3NTMmQ2MTFKTTF0bkJKRFgKNERFaVc5aXJ1Nm1wR2NaQ1JWYlhUT005cHV1V0NTQ1pPNktKZ29NZlVMbnpHT0diN0ludmtoajBJZThQQ0JGWQpWUmFvRm5NNE5HMUdHMnpuckcrNjFucFlBbGpGcjhQN2J4WmRsWWpPcjFGbFhydU1UeEdEZEpNYkNTcFViRmRUCkxOOVUxYlFNS3JBN3NsZEJCcTc0ZHlUZkNKZDFQaGdMSzZZbVZGdFo3Vmk4eFkwbjlpa2svZEpDWjM5aTFWR2wKK3NzPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
    # caFile: '' # local path to CA file
    customResources:
      - group: 'argoproj.io'
        apiVersion: 'v1alpha1'
        plural: 'applications'
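`serviceAccountToken` is committed empty above. Following the command the README already documents (and assuming a `backstage` deployment exists in the `backstage` namespace of the target cluster), the token can be read like this and either pasted in or injected through an environment variable:

```bash
# Read the mounted service account token from the running backstage pod.
kubectl -n backstage exec -it deploy/backstage -- \
  cat /var/run/secrets/kubernetes.io/serviceaccount/token
```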
@@ -1,6 +1,6 @@
{
  "packages": ["packages/*", "plugins/*"],
  "npmClient": "yarn",
  "version": "0.1.0",
  "$schema": "node_modules/lerna/schemas/lerna-schema.json"
  "useWorkspaces": true,
  "version": "0.1.0"
}

18  package.json
@@ -3,7 +3,7 @@
  "version": "1.0.0",
  "private": true,
  "engines": {
    "node": "18 || 20"
    "node": "16 || 18"
  },
  "scripts": {
    "dev": "concurrently \"yarn start\" \"yarn start-backend\"",
@@ -17,8 +17,6 @@
    "clean": "backstage-cli repo clean",
    "test": "backstage-cli repo test",
    "test:all": "backstage-cli repo test --coverage",
    "test:e2e": "playwright test",
    "fix": "backstage-cli repo fix",
    "lint": "backstage-cli repo lint --since origin/main",
    "lint:all": "backstage-cli repo lint",
    "prettier:check": "prettier --check .",
@@ -31,19 +29,17 @@
    ]
  },
  "devDependencies": {
    "@backstage/cli": "^0.30.0",
    "@backstage/e2e-test-utils": "^0.1.1",
    "@playwright/test": "^1.32.3",
    "@backstage/cli": "^0.22.7",
    "@spotify/prettier-config": "^12.0.0",
    "concurrently": "^8.0.0",
    "lerna": "^7.3.0",
    "concurrently": "^6.0.0",
    "lerna": "^4.0.0",
    "node-gyp": "^9.0.0",
    "prettier": "^2.3.2",
    "typescript": "~5.2.0"
    "typescript": "~4.6.4"
  },
  "resolutions": {
    "@types/react": "^18",
    "@types/react-dom": "^18"
    "@types/react": "^17",
    "@types/react-dom": "^17"
  },
  "prettier": "@spotify/prettier-config",
  "lint-staged": {

@@ -1 +0,0 @@
public
6  packages/app/cypress.json  Normal file
@@ -0,0 +1,6 @@
{
  "baseUrl": "http://localhost:3001",
  "fixturesFolder": false,
  "pluginsFile": false,
  "retries": 3
}

12  packages/app/cypress/.eslintrc.json  Normal file
@@ -0,0 +1,12 @@
{
  "plugins": ["cypress"],
  "extends": ["plugin:cypress/recommended"],
  "rules": {
    "jest/expect-expect": [
      "error",
      {
        "assertFunctionNames": ["expect", "cy.contains"]
      }
    ]
  }
}

6  packages/app/cypress/integration/app.js  Normal file
@@ -0,0 +1,6 @@
describe('App', () => {
  it('should render the catalog', () => {
    cy.visit('/');
    cy.contains('My Company Catalog');
  });
});

@@ -1,23 +0,0 @@
/*
 * Copyright 2020 The Backstage Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { test, expect } from '@playwright/test';

test('App should render the welcome page', async ({ page }) => {
  await page.goto('/');

  await expect(page.getByText('My Company Catalog')).toBeVisible();
});
@ -11,55 +11,64 @@
|
|||
"build": "backstage-cli package build",
|
||||
"clean": "backstage-cli package clean",
|
||||
"test": "backstage-cli package test",
|
||||
"lint": "backstage-cli package lint"
|
||||
"lint": "backstage-cli package lint",
|
||||
"test:e2e": "cross-env PORT=3001 start-server-and-test start http://localhost:3001 cy:dev",
|
||||
"test:e2e:ci": "cross-env PORT=3001 start-server-and-test start http://localhost:3001 cy:run",
|
||||
"cy:dev": "cypress open",
|
||||
"cy:run": "cypress run --browser chrome"
|
||||
},
|
||||
"dependencies": {
|
||||
"@backstage-community/plugin-github-actions": "^0.6.16",
|
||||
"@backstage-community/plugin-tech-radar": "^0.7.4",
|
||||
"@backstage/app-defaults": "^1.5.17",
|
||||
"@backstage/catalog-model": "^1.7.3",
|
||||
"@backstage/cli": "^0.30.0",
|
||||
"@backstage/core-app-api": "^1.15.5",
|
||||
"@backstage/core-components": "^0.16.4",
|
||||
"@backstage/core-plugin-api": "^1.10.4",
|
||||
"@backstage/integration-react": "^1.2.4",
|
||||
"@backstage/plugin-api-docs": "^0.12.4",
|
||||
"@backstage/plugin-catalog": "^1.27.0",
|
||||
"@backstage/plugin-catalog-common": "^1.1.3",
|
||||
"@backstage/plugin-catalog-graph": "^0.4.16",
|
||||
"@backstage/plugin-catalog-import": "^0.12.10",
|
||||
"@backstage/plugin-catalog-react": "^1.15.2",
|
||||
"@backstage/plugin-home": "^0.8.5",
|
||||
"@backstage/plugin-kubernetes": "^0.12.4",
|
||||
"@backstage/plugin-org": "^0.6.36",
|
||||
"@backstage/plugin-permission-react": "^0.4.31",
|
||||
"@backstage/plugin-scaffolder": "^1.28.0",
|
||||
"@backstage/plugin-search": "^1.4.23",
|
||||
"@backstage/plugin-search-react": "^1.8.6",
|
||||
"@backstage/plugin-techdocs": "^1.12.3",
|
||||
"@backstage/plugin-techdocs-module-addons-contrib": "^1.1.21",
|
||||
"@backstage/plugin-techdocs-react": "^1.2.14",
|
||||
"@backstage/plugin-user-settings": "^0.8.19",
|
||||
"@backstage/theme": "^0.6.4",
|
||||
"@backstage/app-defaults": "^1.3.1",
|
||||
"@backstage/catalog-model": "^1.3.0",
|
||||
"@backstage/cli": "^0.22.7",
|
||||
"@backstage/core-app-api": "^1.8.0",
|
||||
"@backstage/core-components": "^0.13.1",
|
||||
"@backstage/core-plugin-api": "^1.5.1",
|
||||
"@backstage/integration-react": "^1.1.13",
|
||||
"@backstage/plugin-api-docs": "^0.9.4",
|
||||
"@backstage/plugin-catalog": "^1.11.1",
|
||||
"@backstage/plugin-catalog-common": "^1.0.13",
|
||||
"@backstage/plugin-catalog-graph": "^0.2.30",
|
||||
"@backstage/plugin-catalog-import": "^0.9.8",
|
||||
"@backstage/plugin-catalog-react": "^1.6.0",
|
||||
"@backstage/plugin-github-actions": "^0.5.18",
|
||||
"@backstage/plugin-kubernetes": "^0.9.2",
|
||||
"@backstage/plugin-org": "^0.6.8",
|
||||
"@backstage/plugin-permission-react": "^0.4.12",
|
||||
"@backstage/plugin-scaffolder": "^1.13.1",
|
||||
"@backstage/plugin-scaffolder-react": "^1.4.0",
|
||||
"@backstage/plugin-search": "^1.3.1",
|
||||
"@backstage/plugin-search-react": "^1.6.1",
|
||||
"@backstage/plugin-tech-radar": "^0.6.4",
|
||||
"@backstage/plugin-techdocs": "^1.6.3",
|
||||
"@backstage/plugin-techdocs-module-addons-contrib": "^1.0.13",
|
||||
"@backstage/plugin-techdocs-react": "^1.1.6",
|
||||
"@backstage/plugin-user-settings": "^0.7.3",
|
||||
"@backstage/theme": "^0.3.0",
|
||||
"@cnoe-io/plugin-apache-spark": "file:/var/folders/b7/h6wzrfwn6l30pn3fk5j2794dcy0vlz/T/tmp-26390-66rLxROMRq6K",
|
||||
"@cnoe-io/plugin-argo-workflows": "file:/var/folders/b7/h6wzrfwn6l30pn3fk5j2794dcy0vlz/T/tmp-32426-R2tjIfGLJy55",
|
||||
"@internal/plugin-workflows": "^0.1.0",
|
||||
"@material-ui/core": "^4.12.2",
|
||||
"@material-ui/icons": "^4.9.1",
|
||||
"@roadiehq/backstage-plugin-argo-cd": "^2.5.1",
|
||||
"@rjsf/core": "^5.8.1",
|
||||
"@rjsf/utils": "^5.8.1",
|
||||
"history": "^5.0.0",
|
||||
"react": "^18.0.2",
|
||||
"react-dom": "^18.0.2",
|
||||
"react-router": "^6.3.0",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-router-dom": "^6.3.0",
|
||||
"react-use": "^17.2.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@backstage/test-utils": "^1.7.5",
|
||||
"@playwright/test": "^1.32.3",
|
||||
"@testing-library/dom": "^9.0.0",
|
||||
"@testing-library/jest-dom": "^6.0.0",
|
||||
"@testing-library/react": "^14.0.0",
|
||||
"@backstage/test-utils": "^1.3.1",
|
||||
"@testing-library/jest-dom": "^5.10.1",
|
||||
"@testing-library/react": "^12.1.3",
|
||||
"@testing-library/user-event": "^14.0.0",
|
||||
"@types/node": "^16.11.26",
|
||||
"@types/react-dom": "*",
|
||||
"cross-env": "^7.0.0"
|
||||
"cross-env": "^7.0.0",
|
||||
"cypress": "^9.7.0",
|
||||
"eslint-plugin-cypress": "^2.10.3",
|
||||
"start-server-and-test": "^1.10.11"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
name="description"
|
||||
content="Backstage is an open platform for building developer portals"
|
||||
/>
|
||||
<link rel="apple-touch-icon" href="<%= publicPath %>/logo192.png" />
|
||||
<!--
|
||||
manifest.json provides metadata used when your web app is installed on a
|
||||
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import React from 'react';
|
||||
import { render, waitFor } from '@testing-library/react';
|
||||
import { renderWithEffects } from '@backstage/test-utils';
|
||||
import App from './App';
|
||||
|
||||
describe('App', () => {
|
||||
|
@ -20,10 +20,7 @@ describe('App', () => {
|
|||
] as any,
|
||||
};
|
||||
|
||||
const rendered = render(<App />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(rendered.baseElement).toBeInTheDocument();
|
||||
});
|
||||
const rendered = await renderWithEffects(<App />);
|
||||
expect(rendered.baseElement).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -11,9 +11,10 @@ import {
|
|||
catalogImportPlugin,
|
||||
} from '@backstage/plugin-catalog-import';
|
||||
import { ScaffolderPage, scaffolderPlugin } from '@backstage/plugin-scaffolder';
|
||||
import { ScaffolderFieldExtensions } from '@backstage/plugin-scaffolder-react';
|
||||
import { orgPlugin } from '@backstage/plugin-org';
|
||||
import { SearchPage } from '@backstage/plugin-search';
|
||||
import { TechRadarPage } from '@backstage-community/plugin-tech-radar';
|
||||
import { TechRadarPage } from '@backstage/plugin-tech-radar';
|
||||
import {
|
||||
TechDocsIndexPage,
|
||||
techdocsPlugin,
|
||||
|
@ -22,62 +23,60 @@ import {
|
|||
import { TechDocsAddons } from '@backstage/plugin-techdocs-react';
|
||||
import { ReportIssue } from '@backstage/plugin-techdocs-module-addons-contrib';
|
||||
import { UserSettingsPage } from '@backstage/plugin-user-settings';
|
||||
import {apis, keycloakOIDCAuthApiRef} from './apis';
|
||||
import { apis, keycloakOIDCAuthApiRef } from './apis';
|
||||
import { entityPage } from './components/catalog/EntityPage';
|
||||
import { searchPage } from './components/search/SearchPage';
|
||||
import { Root } from './components/Root';
|
||||
|
||||
import {AlertDisplay, OAuthRequestDialog, SignInPage} from '@backstage/core-components';
|
||||
import {
|
||||
AlertDisplay,
|
||||
OAuthRequestDialog,
|
||||
SignInPage,
|
||||
} from '@backstage/core-components';
|
||||
import { createApp } from '@backstage/app-defaults';
|
||||
import { AppRouter, FlatRoutes } from '@backstage/core-app-api';
|
||||
import { CatalogGraphPage } from '@backstage/plugin-catalog-graph';
|
||||
import { RequirePermission } from '@backstage/plugin-permission-react';
|
||||
import { catalogEntityCreatePermission } from '@backstage/plugin-catalog-common/alpha';
|
||||
import {configApiRef, useApi} from "@backstage/core-plugin-api";
|
||||
import { GetK8sOIDCTokenExtension } from './scaffolder/credentials';
|
||||
|
||||
const app = createApp({
|
||||
apis,
|
||||
components: {
|
||||
SignInPage: props => {
|
||||
const configApi = useApi(configApiRef);
|
||||
if (configApi.getString('auth.environment') === 'local') {
|
||||
return <SignInPage {...props} auto providers={['guest']} />;
|
||||
}
|
||||
return (
|
||||
<SignInPage
|
||||
{...props}
|
||||
provider={{
|
||||
id: 'keycloak-oidc',
|
||||
title: 'Keycloak',
|
||||
message: 'Sign in using Keycloak',
|
||||
apiRef: keycloakOIDCAuthApiRef,
|
||||
}}
|
||||
/>
|
||||
);
|
||||
},
|
||||
// SignInPage: (props) => <ProxiedSignInPage {...props} provider="oauth2Proxy" />,
|
||||
SignInPage: props => (
|
||||
<SignInPage
|
||||
{...props}
|
||||
auto
|
||||
provider={{
|
||||
id: 'keycloak-oidc',
|
||||
title: 'Keycloak',
|
||||
message: 'Sign in using Keycloak',
|
||||
apiRef: keycloakOIDCAuthApiRef,
|
||||
}}
|
||||
/>
|
||||
),
|
||||
},
|
||||
bindRoutes({ bind }) {
|
||||
bind(catalogPlugin.externalRoutes, {
|
||||
createComponent: scaffolderPlugin.routes.root,
|
||||
viewTechDoc: techdocsPlugin.routes.docRoot,
|
||||
createFromTemplate: scaffolderPlugin.routes.selectedTemplate,
|
||||
});
|
||||
bind(apiDocsPlugin.externalRoutes, {
|
||||
registerApi: catalogImportPlugin.routes.importPage,
|
||||
});
|
||||
bind(scaffolderPlugin.externalRoutes, {
|
||||
registerComponent: catalogImportPlugin.routes.importPage,
|
||||
viewTechDoc: techdocsPlugin.routes.docRoot,
|
||||
});
|
||||
bind(orgPlugin.externalRoutes, {
|
||||
catalogIndex: catalogPlugin.routes.catalogIndex,
|
||||
});
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
const routes = (
|
||||
<FlatRoutes>
|
||||
<Route path="/" element={<Navigate to="home" />} />
|
||||
<Route path="/" element={<Navigate to="catalog" />} />
|
||||
<Route path="/catalog" element={<CatalogIndexPage />} />
|
||||
<Route
|
||||
path="/catalog/:namespace/:kind/:name"
|
||||
|
@ -94,7 +93,11 @@ const routes = (
|
|||
<ReportIssue />
|
||||
</TechDocsAddons>
|
||||
</Route>
|
||||
<Route path="/create" element={<ScaffolderPage />} />
|
||||
<Route path="/create" element={<ScaffolderPage />}>
|
||||
<ScaffolderFieldExtensions>
|
||||
<GetK8sOIDCTokenExtension />
|
||||
</ScaffolderFieldExtensions>
|
||||
</Route>
|
||||
<Route path="/api-docs" element={<ApiExplorerPage />} />
|
||||
<Route
|
||||
path="/tech-radar"
|
||||
|
|
|
@ -4,22 +4,30 @@ import {
|
|||
ScmAuth,
|
||||
} from '@backstage/integration-react';
|
||||
import {
|
||||
AnyApiFactory, ApiRef, BackstageIdentityApi,
|
||||
AnyApiFactory,
|
||||
ApiRef,
|
||||
BackstageIdentityApi,
|
||||
configApiRef,
|
||||
createApiFactory, createApiRef, discoveryApiRef, oauthRequestApiRef, OpenIdConnectApi, ProfileInfoApi, SessionApi,
|
||||
createApiFactory,
|
||||
createApiRef,
|
||||
discoveryApiRef,
|
||||
oauthRequestApiRef,
|
||||
OpenIdConnectApi,
|
||||
ProfileInfoApi,
|
||||
SessionApi,
|
||||
} from '@backstage/core-plugin-api';
|
||||
import {OAuth2} from "@backstage/core-app-api";
|
||||
import { OAuth2 } from '@backstage/core-app-api';
|
||||
|
||||
export const keycloakOIDCAuthApiRef: ApiRef<
|
||||
OpenIdConnectApi & ProfileInfoApi & BackstageIdentityApi & SessionApi
|
||||
> = createApiRef({
|
||||
id: 'auth.keycloak-oidc',
|
||||
id: 'auth.keycloak-oidc-provider',
|
||||
});
|
||||
export const apis: AnyApiFactory[] = [
|
||||
createApiFactory({
|
||||
api: scmIntegrationsApiRef,
|
||||
deps: {configApi: configApiRef},
|
||||
factory: ({configApi}) => ScmIntegrationsApi.fromConfig(configApi),
|
||||
deps: { configApi: configApiRef },
|
||||
factory: ({ configApi }) => ScmIntegrationsApi.fromConfig(configApi),
|
||||
}),
|
||||
ScmAuth.createDefaultApiFactory(),
|
||||
createApiFactory({
|
||||
|
|
|
@ -5,6 +5,8 @@ import ExtensionIcon from '@material-ui/icons/Extension';
|
|||
import MapIcon from '@material-ui/icons/MyLocation';
|
||||
import LibraryBooks from '@material-ui/icons/LibraryBooks';
|
||||
import CreateComponentIcon from '@material-ui/icons/AddCircleOutline';
|
||||
import LogoFull from './LogoFull';
|
||||
import LogoIcon from './LogoIcon';
|
||||
import {
|
||||
Settings as SidebarSettings,
|
||||
UserSettingsSignInAvatar,
|
||||
|
@ -19,6 +21,7 @@ import {
|
|||
SidebarPage,
|
||||
SidebarScrollWrapper,
|
||||
SidebarSpace,
|
||||
useSidebarOpenState,
|
||||
Link,
|
||||
} from '@backstage/core-components';
|
||||
import MenuIcon from '@material-ui/icons/Menu';
|
||||
|
@ -41,10 +44,12 @@ const useSidebarLogoStyles = makeStyles({
|
|||
|
||||
const SidebarLogo = () => {
|
||||
const classes = useSidebarLogoStyles();
|
||||
const { isOpen } = useSidebarOpenState();
|
||||
|
||||
return (
|
||||
<div className={classes.root}>
|
||||
<Link to="/" underline="none" className={classes.link} aria-label="Home">
|
||||
{isOpen ? <LogoFull /> : <LogoIcon />}
|
||||
</Link>
|
||||
</div>
|
||||
);
|
||||
|
|
|
@ -10,8 +10,11 @@ import {
|
|||
} from '@backstage/plugin-api-docs';
|
||||
import {
|
||||
EntityAboutCard,
|
||||
EntityDependsOnComponentsCard,
|
||||
EntityDependsOnResourcesCard,
|
||||
EntityHasComponentsCard,
|
||||
EntityHasResourcesCard,
|
||||
EntityHasSubcomponentsCard,
|
||||
EntityHasSystemsCard,
|
||||
EntityLayout,
|
||||
EntityLinksCard,
|
||||
|
@ -22,9 +25,11 @@ import {
|
|||
isKind,
|
||||
hasCatalogProcessingErrors,
|
||||
isOrphan,
|
||||
hasRelationWarnings,
|
||||
EntityRelationWarning,
|
||||
} from '@backstage/plugin-catalog';
|
||||
import {
|
||||
isGithubActionsAvailable,
|
||||
EntityGithubActionsContent,
|
||||
} from '@backstage/plugin-github-actions';
|
||||
import {
|
||||
EntityUserProfileCard,
|
||||
EntityGroupProfileCard,
|
||||
|
@ -51,12 +56,15 @@ import {
|
|||
import { TechDocsAddons } from '@backstage/plugin-techdocs-react';
|
||||
import { ReportIssue } from '@backstage/plugin-techdocs-module-addons-contrib';
|
||||
|
||||
import { EntityKubernetesContent, isKubernetesAvailable } from '@backstage/plugin-kubernetes';
|
||||
import { EntityKubernetesContent } from '@backstage/plugin-kubernetes';
|
||||
|
||||
import {
|
||||
EntityArgoCDOverviewCard,
|
||||
isArgocdAvailable
|
||||
} from '@roadiehq/backstage-plugin-argo-cd';
|
||||
EntityArgoWorkflowsOverviewCard,
|
||||
EntityArgoWorkflowsTemplateOverviewCard,
|
||||
isArgoWorkflowsAvailable,
|
||||
} from '@cnoe-io/plugin-argo-workflows';
|
||||
|
||||
import { ApacheSparkPage } from '@cnoe-io/plugin-apache-spark';
|
||||
|
||||
const techdocsContent = (
|
||||
<EntityTechdocsContent>
|
||||
|
@ -67,7 +75,13 @@ const techdocsContent = (
|
|||
);
|
||||
|
||||
const cicdContent = (
|
||||
// This is an example of how you can implement your company's logic in entity page.
|
||||
// You can for example enforce that all components of type 'service' should use GitHubActions
|
||||
<EntitySwitch>
|
||||
<EntitySwitch.Case if={isGithubActionsAvailable}>
|
||||
<EntityGithubActionsContent />
|
||||
</EntitySwitch.Case>
|
||||
|
||||
<EntitySwitch.Case>
|
||||
<EmptyState
|
||||
title="No CI/CD available for this entity"
|
||||
|
@ -97,14 +111,6 @@ const entityWarningContent = (
|
|||
</EntitySwitch.Case>
|
||||
</EntitySwitch>
|
||||
|
||||
<EntitySwitch>
|
||||
<EntitySwitch.Case if={hasRelationWarnings}>
|
||||
<Grid item xs={12}>
|
||||
<EntityRelationWarning />
|
||||
</Grid>
|
||||
</EntitySwitch.Case>
|
||||
</EntitySwitch>
|
||||
|
||||
<EntitySwitch>
|
||||
<EntitySwitch.Case if={hasCatalogProcessingErrors}>
|
||||
<Grid item xs={12}>
|
||||
|
@ -122,18 +128,25 @@ const overviewContent = (
|
|||
<EntityAboutCard variant="gridItem" />
|
||||
</Grid>
|
||||
<EntitySwitch>
|
||||
<EntitySwitch.Case if={e => Boolean(isArgocdAvailable(e))}>
|
||||
<EntitySwitch.Case if={e => isArgoWorkflowsAvailable(e)}>
|
||||
<Grid item md={6}>
|
||||
<EntityArgoCDOverviewCard />
|
||||
<EntityArgoWorkflowsOverviewCard />
|
||||
</Grid>
|
||||
<Grid item md={6}>
|
||||
<EntityArgoWorkflowsTemplateOverviewCard />
|
||||
</Grid>
|
||||
</EntitySwitch.Case>
|
||||
</EntitySwitch>
|
||||
<Grid item md={6} xs={12}>
|
||||
<EntityCatalogGraphCard variant="gridItem" height={400} />
|
||||
</Grid>
|
||||
|
||||
<Grid item md={4} xs={12}>
|
||||
<EntityLinksCard />
|
||||
</Grid>
|
||||
<Grid item md={8} xs={12}>
|
||||
<EntityHasSubcomponentsCard variant="gridItem" />
|
||||
</Grid>
|
||||
</Grid>
|
||||
);
|
||||
|
||||
|
@ -147,10 +160,6 @@ const serviceEntityPage = (
|
|||
{cicdContent}
|
||||
</EntityLayout.Route>
|
||||
|
||||
<EntityLayout.Route path="/kubernetes" title="Kubernetes" if={e => isKubernetesAvailable(e)}>
|
||||
<EntityKubernetesContent refreshIntervalMs={30000} />
|
||||
</EntityLayout.Route>
|
||||
|
||||
<EntityLayout.Route path="/api" title="API">
|
||||
<Grid container spacing={3} alignItems="stretch">
|
||||
<Grid item md={6}>
|
||||
|
@ -162,6 +171,17 @@ const serviceEntityPage = (
|
|||
</Grid>
|
||||
</EntityLayout.Route>
|
||||
|
||||
<EntityLayout.Route path="/dependencies" title="Dependencies">
|
||||
<Grid container spacing={3} alignItems="stretch">
|
||||
<Grid item md={6}>
|
||||
<EntityDependsOnComponentsCard variant="gridItem" />
|
||||
</Grid>
|
||||
<Grid item md={6}>
|
||||
<EntityDependsOnResourcesCard variant="gridItem" />
|
||||
</Grid>
|
||||
</Grid>
|
||||
</EntityLayout.Route>
|
||||
|
||||
<EntityLayout.Route path="/docs" title="Docs">
|
||||
{techdocsContent}
|
||||
</EntityLayout.Route>
|
||||
|
@ -178,12 +198,34 @@ const websiteEntityPage = (
|
|||
{cicdContent}
|
||||
</EntityLayout.Route>
|
||||
|
||||
<EntityLayout.Route path="/dependencies" title="Dependencies">
|
||||
<Grid container spacing={3} alignItems="stretch">
|
||||
<Grid item md={6}>
|
||||
<EntityDependsOnComponentsCard variant="gridItem" />
|
||||
</Grid>
|
||||
<Grid item md={6}>
|
||||
<EntityDependsOnResourcesCard variant="gridItem" />
|
||||
</Grid>
|
||||
</Grid>
|
||||
</EntityLayout.Route>
|
||||
|
||||
<EntityLayout.Route path="/docs" title="Docs">
|
||||
{techdocsContent}
|
||||
</EntityLayout.Route>
|
||||
</EntityLayout>
|
||||
);
|
||||
|
||||
const jobEntityPage = (
|
||||
<EntityLayout>
|
||||
<EntityLayout.Route path="/" title="Overview">
|
||||
{overviewContent}
|
||||
</EntityLayout.Route>
|
||||
<EntityLayout.Route path="/apache-spark" title="Apache Spark">
|
||||
<ApacheSparkPage />
|
||||
</EntityLayout.Route>
|
||||
</EntityLayout>
|
||||
);
|
||||
|
||||
/**
|
||||
* NOTE: This page is designed to work on small screens such as mobile devices.
|
||||
* This is based on Material UI Grid. If breakpoints are used, each grid item must set the `xs` prop to a column size or to `true`,
|
||||
|
@ -212,6 +254,9 @@ const componentPage = (
|
|||
<EntitySwitch.Case if={isComponentType('website')}>
|
||||
{websiteEntityPage}
|
||||
</EntitySwitch.Case>
|
||||
<EntitySwitch.Case if={isComponentType('job')}>
|
||||
{jobEntityPage}
|
||||
</EntitySwitch.Case>
|
||||
|
||||
<EntitySwitch.Case>{defaultEntityPage}</EntitySwitch.Case>
|
||||
</EntitySwitch>
|
||||
|
@ -228,6 +273,9 @@ const apiPage = (
|
|||
<Grid item md={6} xs={12}>
|
||||
<EntityCatalogGraphCard variant="gridItem" height={400} />
|
||||
</Grid>
|
||||
<Grid item md={4} xs={12}>
|
||||
<EntityLinksCard />
|
||||
</Grid>
|
||||
<Grid container item md={12}>
|
||||
<Grid item md={6}>
|
||||
<EntityProvidingComponentsCard />
|
||||
|
@ -276,12 +324,9 @@ const groupPage = (
|
|||
<Grid item xs={12} md={6}>
|
||||
<EntityOwnershipCard variant="gridItem" />
|
||||
</Grid>
|
||||
<Grid item xs={12} md={6}>
|
||||
<Grid item xs={12}>
|
||||
<EntityMembersListCard />
|
||||
</Grid>
|
||||
<Grid item xs={12} md={6}>
|
||||
<EntityLinksCard />
|
||||
</Grid>
|
||||
</Grid>
|
||||
</EntityLayout.Route>
|
||||
</EntityLayout>
|
||||
|
@ -331,6 +376,9 @@ const systemPage = (
|
|||
unidirectional={false}
|
||||
/>
|
||||
</EntityLayout.Route>
|
||||
<EntityLayout.Route path="/kubernetes" title="Kubernetes">
|
||||
<EntityKubernetesContent refreshIntervalMs={30000} />
|
||||
</EntityLayout.Route>
|
||||
</EntityLayout>
|
||||
);
|
||||
|
||||
|
|
|
@@ -1,6 +1,6 @@
import '@backstage/cli/asset-types';
import React from 'react';
import ReactDOM from 'react-dom/client';
import ReactDOM from 'react-dom';
import App from './App';

ReactDOM.createRoot(document.getElementById('root')!).render(<App />);
ReactDOM.render(<App />, document.getElementById('root'));
@ -0,0 +1,163 @@
|
|||
import React, { useState } from 'react';
|
||||
import {
|
||||
createScaffolderLayout,
|
||||
LayoutTemplate,
|
||||
} from '@backstage/plugin-scaffolder-react';
|
||||
import { scaffolderPlugin } from '@backstage/plugin-scaffolder';
|
||||
import { Button, Grid } from '@material-ui/core';
|
||||
import {
|
||||
ObjectFieldTemplatePropertyType,
|
||||
ObjectFieldTemplateProps,
|
||||
StrictRJSFSchema,
|
||||
FormContextType,
|
||||
RJSFSchema,
|
||||
titleId,
|
||||
getTemplate,
|
||||
getUiOptions,
|
||||
} from '@rjsf/utils';
|
||||
|
||||
const TwoColumn: LayoutTemplate = ({ properties, description, title }) => {
|
||||
const mid = Math.ceil(properties.length / 2);
|
||||
|
||||
return (
|
||||
<>
|
||||
<h1>{title}</h1>
|
||||
<h2>In two column layout!!</h2>
|
||||
<Grid container justifyContent="flex-end">
|
||||
{properties.slice(0, mid).map(prop => (
|
||||
<Grid item xs={6} key={prop.content.key}>
|
||||
{prop.content}
|
||||
</Grid>
|
||||
))}
|
||||
{properties.slice(mid).map(prop => (
|
||||
<Grid item xs={6} key={prop.content.key}>
|
||||
{prop.content}
|
||||
</Grid>
|
||||
))}
|
||||
</Grid>
|
||||
{description}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
function CollapsableFieldTemplate<
|
||||
T = any,
|
||||
S extends StrictRJSFSchema = RJSFSchema,
|
||||
F extends FormContextType = any,
|
||||
>(props: ObjectFieldTemplateProps<T, S, F>) {
|
||||
const {
|
||||
registry,
|
||||
properties,
|
||||
title,
|
||||
description,
|
||||
uiSchema,
|
||||
required,
|
||||
schema,
|
||||
idSchema,
|
||||
} = props;
|
||||
const [collapsed, setCollapsed] = useState(false);
|
||||
|
||||
const out = (
|
||||
<div>
|
||||
{title} hiii{description}
|
||||
<Button
|
||||
variant="outlined"
|
||||
size="small"
|
||||
style={{
|
||||
display: 'inline-block',
|
||||
float: 'right',
|
||||
fontSize: 'large',
|
||||
}}
|
||||
onClick={() => setCollapsed(!collapsed)}
|
||||
>
|
||||
Collapse
|
||||
</Button>
|
||||
<div>
|
||||
{collapsed
|
||||
? null
|
||||
: properties.map(prop => (
|
||||
<div key={prop.content.key}>
|
||||
<Button
|
||||
variant="outlined"
|
||||
size="small"
|
||||
style={{
|
||||
display: 'inline-block',
|
||||
float: 'right',
|
||||
fontSize: 'large',
|
||||
}}
|
||||
onClick={() => setCollapsed(!collapsed)}
|
||||
>
|
||||
Collapse
|
||||
</Button>
|
||||
{prop.content}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
return out;
|
||||
// return (
|
||||
// <>
|
||||
// {hidden ? null : (
|
||||
// <div className={classNames}>
|
||||
// <>
|
||||
// {!isThisTheTopmostElement() && (
|
||||
// <Button
|
||||
// variant="outlined"
|
||||
// size="small"
|
||||
// style={{
|
||||
// display: 'inline-block',
|
||||
// float: 'right',
|
||||
// fontSize: 'large',
|
||||
// }}
|
||||
// onClick={() => setCollapsed(!collapsed)}
|
||||
// >
|
||||
// {collapsed ? (
|
||||
// <>
|
||||
// +
|
||||
// {(errors?.props?.errors ?? []).length ? (
|
||||
// <span style={{ fontSize: 'small' }}>
|
||||
// {' '}
|
||||
// (Contains errors)
|
||||
// </span>
|
||||
// ) : null}
|
||||
// </>
|
||||
// ) : (
|
||||
// '-'
|
||||
// )}
|
||||
// </Button>
|
||||
// )}
|
||||
// {get(schema, 'type', undefined) !== 'object' &&
|
||||
// get(schema, 'type', undefined) !== 'array' ? (
|
||||
// <>{label ? `${label}${required ? ' *required' : ''}` : null}</>
|
||||
// ) : (
|
||||
// <fieldset className="field field-array field-array-of-object">
|
||||
// {label ? (
|
||||
// <legend>{`${label}${required ? '*required' : ''}`}</legend>
|
||||
// ) : null}
|
||||
// </fieldset>
|
||||
// )}
|
||||
// {!collapsed && (
|
||||
// <>
|
||||
// {get(schema, 'type', undefined) !== 'object' &&
|
||||
// get(schema, 'type', undefined) !== 'array'
|
||||
// ? description
|
||||
// : null}
|
||||
// {children}
|
||||
// {errors}
|
||||
// {help}
|
||||
// </>
|
||||
// )}
|
||||
// </>
|
||||
// </div>
|
||||
// )}
|
||||
// </>
|
||||
// );
|
||||
}
|
||||
|
||||
export const CollapsableField = scaffolderPlugin.provide(
|
||||
createScaffolderLayout({
|
||||
name: 'CollapsableField',
|
||||
component: CollapsableFieldTemplate,
|
||||
}),
|
||||
);
|
14
packages/app/src/scaffolder/credentials/extensions.ts
Normal file
14
packages/app/src/scaffolder/credentials/extensions.ts
Normal file
|
@ -0,0 +1,14 @@
|
|||
import {scaffolderPlugin,} from '@backstage/plugin-scaffolder';
|
||||
import {createScaffolderFieldExtension} from "@backstage/plugin-scaffolder-react";
|
||||
import {GetK8sOIDCToken} from "./getOIDCToken";
|
||||
import {ClusterPickerSchema} from "./schema";
|
||||
|
||||
export const GetK8sOIDCTokenExtension = scaffolderPlugin.provide(
|
||||
createScaffolderFieldExtension(
|
||||
{
|
||||
name: 'GetK8sOIDCToken',
|
||||
component: GetK8sOIDCToken,
|
||||
schema: ClusterPickerSchema,
|
||||
}
|
||||
)
|
||||
)
packages/app/src/scaffolder/credentials/getOIDCToken.tsx (new file, 102 lines)
@@ -0,0 +1,102 @@

import React, { useCallback, useEffect } from 'react';
import FormControl from '@material-ui/core/FormControl';
import {
  useApi,
  configApiRef,
  discoveryApiRef,
  oauthRequestApiRef,
} from '@backstage/core-plugin-api';
import { kubernetesApiRef } from '@backstage/plugin-kubernetes';
import { FormHelperText } from '@material-ui/core';
import { Progress, Select } from '@backstage/core-components';
import useAsync from 'react-use/lib/useAsync';
import { useTemplateSecrets } from '@backstage/plugin-scaffolder-react';
import { ClusterPickerProps } from './schema';
import { OAuth2 } from '@backstage/core-app-api';

export const GetK8sOIDCToken = (props: ClusterPickerProps) => {
  const k8sApi = useApi(kubernetesApiRef);
  const { setSecrets, secrets } = useTemplateSecrets();

  const discoveryApi = useApi(discoveryApiRef);
  const oauthRequestApi = useApi(oauthRequestApiRef);
  const configApi = useApi(configApiRef);

  const { uiSchema, required } = props;
  let { rawErrors } = props;

  // List the clusters known to the Kubernetes plugin and turn them into
  // select options.
  const { value: { clusters } = { clusters: [] }, loading } = useAsync(
    async () => {
      const c = await k8sApi.getClusters();
      return { clusters: c.map(i => ({ label: i.name, value: i.name })) };
    },
  );
  if (!rawErrors) {
    rawErrors = [];
  }

  // Request an OIDC token for the selected cluster and store it in the
  // template secrets under the key configured via ui:options.
  const getToken = useCallback(
    async (clusterName: string) => {
      const { requestUserCredentials } = uiSchema?.['ui:options'] ?? {};
      if (!requestUserCredentials) {
        return;
      }
      const cs = await k8sApi.getClusters();
      const cluster = cs.find(c => c.name === clusterName);
      if (cluster?.oidcTokenProvider === undefined) {
        throw new Error('no oidc provider defined for this cluster');
      }

      const oidc = OAuth2.create({
        discoveryApi,
        oauthRequestApi,
        provider: {
          id: cluster.oidcTokenProvider,
          title: 'OIDC',
          icon: () => null,
        },
        environment: configApi.getOptionalString('auth.environment'),
        defaultScopes: ['openid', 'profile', 'email', 'groups'],
      });
      const token = await oidc.getIdToken();

      setSecrets({ [requestUserCredentials.secretKey]: token });
    },
    [configApi, discoveryApi, k8sApi, oauthRequestApi, setSecrets, uiSchema],
  );

  // Fetch a token for the first cluster on mount so the secret is populated
  // even if the user keeps the default selection.
  useEffect(() => {
    const { requestUserCredentials } = uiSchema?.['ui:options'] ?? {};
    if (!requestUserCredentials?.secretKey || secrets[requestUserCredentials.secretKey]) {
      return;
    }

    if (clusters.length) {
      getToken(clusters[0].value).catch(console.error);
    }
  }, [clusters, getToken, secrets, uiSchema]);

  if (loading) {
    return <Progress />;
  }

  return (
    <FormControl
      margin="normal"
      required={required}
      error={rawErrors?.length > 0}
    >
      <Select
        native
        label="Cluster"
        items={clusters}
        onChange={e => getToken(e.toString())}
        placeholder="select one"
      />
      <FormHelperText id="entityName">
        Kubernetes Cluster Name
      </FormHelperText>
    </FormControl>
  );
};
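The token that `GetK8sOIDCToken` stores via `setSecrets` travels with the template run as a scaffolder secret. A hedged sketch of how a backend action could read it, assuming the template forwards it under a key such as `KUBE_TOKEN` (the key name and action id below are illustrative, not taken from this repository):

```ts
import { createTemplateAction } from '@backstage/plugin-scaffolder-node';

// Sketch only: an action that consumes the OIDC token captured by the
// GetK8sOIDCToken field. KUBE_TOKEN and the action id are hypothetical.
export const createUseClusterTokenAction = () =>
  createTemplateAction<{ clusterUrl: string }>({
    id: 'example:kubernetes:use-token',
    schema: {
      input: {
        type: 'object',
        required: ['clusterUrl'],
        properties: {
          clusterUrl: { type: 'string', title: 'Cluster URL' },
        },
      },
    },
    async handler(ctx) {
      // Template steps typically forward the captured secret, e.g. with
      // `token: ${{ secrets.KUBE_TOKEN }}`; it then shows up on ctx.secrets.
      const token = ctx.secrets?.KUBE_TOKEN;
      if (!token) {
        throw new Error('Expected an OIDC token in the template secrets');
      }
      ctx.logger.info(`Would call ${ctx.input.clusterUrl} as the requesting user`);
    },
  });
```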
packages/app/src/scaffolder/credentials/index.ts (new file, 1 line)
@@ -0,0 +1 @@

export { GetK8sOIDCTokenExtension } from './extensions';
packages/app/src/scaffolder/credentials/schema.ts (new file, 20 lines)
@@ -0,0 +1,20 @@

import { z } from 'zod';
import { makeFieldSchemaFromZod } from '@backstage/plugin-scaffolder';

export const ClusterPickerFieldSchema = makeFieldSchemaFromZod(
  z.string(),
  z.object({
    requestUserCredentials: z
      .object({
        secretKey: z
          .string()
          .describe('Key used within the template secrets context to store the credential'),
      })
      .optional()
      .describe('If defined will request user credentials to auth against the cluster'),
  }),
);

export const ClusterPickerSchema = ClusterPickerFieldSchema.schema;

export type ClusterPickerProps = typeof ClusterPickerFieldSchema.type;

export type ClusterPickerUiOptions = typeof ClusterPickerFieldSchema.uiOptionsType;
|
@ -9,21 +9,15 @@
|
|||
#
|
||||
# Once the commands have been run, you can build the image using `yarn build-image`
|
||||
|
||||
FROM node:18-bookworm-slim
|
||||
|
||||
# Install isolate-vm dependencies, these are needed by the @backstage/plugin-scaffolder-backend.
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install -y --no-install-recommends python3 g++ build-essential && \
|
||||
yarn config set python /usr/bin/python3
|
||||
FROM node:16-bullseye-slim
|
||||
|
||||
# Install sqlite3 dependencies. You can skip this if you don't use sqlite3 in the image,
|
||||
# in which case you should also move better-sqlite3 to "devDependencies" in package.json.
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install -y --no-install-recommends libsqlite3-dev
|
||||
apt-get install -y --no-install-recommends libsqlite3-dev python3 build-essential && \
|
||||
yarn config set python /usr/bin/python3
|
||||
|
||||
# From here on we use the least-privileged `node` user to run the backend.
|
||||
USER node
|
||||
|
|
|
@ -36,7 +36,7 @@ The backend starts up on port 7007 per default.
|
|||
If you want to use the catalog functionality, you need to add so called
|
||||
locations to the backend. These are places where the backend can find some
|
||||
entity descriptor data to consume and serve. For more information, see
|
||||
[Software Catalog Overview - Adding Components to the Catalog](https://backstage.io/docs/features/software-catalog/#adding-components-to-the-catalog).
|
||||
[Software Catalog Overview - Adding Components to the Catalog](https://backstage.io/docs/features/software-catalog/software-catalog-overview#adding-components-to-the-catalog).
|
||||
|
||||
To get started quickly, this template already includes some statically configured example locations
|
||||
in `app-config.yaml` under `catalog.locations`. You can remove and replace these locations as you
|
||||
|
@ -56,4 +56,4 @@ and
|
|||
## Documentation
|
||||
|
||||
- [Backstage Readme](https://github.com/backstage/backstage/blob/master/README.md)
|
||||
- [Backstage Documentation](https://backstage.io/docs)
|
||||
- [Backstage Documentation](https://github.com/backstage/backstage/blob/master/docs/README.md)
|
||||
|
|
|
@ -16,57 +16,43 @@
|
|||
"build-image": "docker build ../.. -f Dockerfile --tag backstage"
|
||||
},
|
||||
"dependencies": {
|
||||
"@backstage/backend-common": "^0.25.0",
|
||||
"@backstage/backend-defaults": "^0.8.1",
|
||||
"@backstage/backend-plugin-api": "^1.2.0",
|
||||
"@backstage/backend-tasks": "^0.6.1",
|
||||
"@backstage/catalog-client": "^1.9.1",
|
||||
"@backstage/catalog-model": "^1.7.3",
|
||||
"@backstage/config": "^1.3.2",
|
||||
"@backstage/errors": "^1.2.7",
|
||||
"@backstage/integration": "^1.16.1",
|
||||
"@backstage/plugin-app-backend": "^0.4.5",
|
||||
"@backstage/plugin-auth-backend": "^0.24.3",
|
||||
"@backstage/plugin-auth-backend-module-guest-provider": "^0.2.5",
|
||||
"@backstage/plugin-auth-backend-module-oidc-provider": "^0.4.0",
|
||||
"@backstage/plugin-auth-node": "^0.6.0",
|
||||
"@backstage/plugin-catalog-backend": "^1.31.0",
|
||||
"@backstage/plugin-catalog-backend-module-scaffolder-entity-model": "^0.2.5",
|
||||
"@backstage/plugin-kubernetes-backend": "^0.19.3",
|
||||
"@backstage/plugin-permission-common": "^0.8.4",
|
||||
"@backstage/plugin-permission-node": "^0.8.8",
|
||||
"@backstage/plugin-proxy-backend": "^0.5.11",
|
||||
"@backstage/plugin-scaffolder-backend": "^1.30.0",
|
||||
"@backstage/plugin-scaffolder-backend-module-gitea": "^0.2.6",
|
||||
"@backstage/plugin-scaffolder-backend-module-github": "^0.6.0",
|
||||
"@backstage/plugin-scaffolder-node": "^0.7.0",
|
||||
"@backstage/plugin-search-backend": "^1.8.2",
|
||||
"@backstage/plugin-search-backend-module-catalog": "^0.3.1",
|
||||
"@backstage/plugin-search-backend-module-pg": "^0.5.41",
|
||||
"@backstage/plugin-search-backend-module-techdocs": "^0.3.6",
|
||||
"@backstage/plugin-search-backend-node": "^1.3.8",
|
||||
"@backstage/plugin-techdocs-backend": "^1.11.6",
|
||||
"@backstage/types": "^1.2.1",
|
||||
"@kubernetes/client-node": "~0.20.0",
|
||||
"@roadiehq/backstage-plugin-argo-cd-backend": "3.1.0",
|
||||
"@roadiehq/scaffolder-backend-module-http-request": "^4.3.5",
|
||||
"@roadiehq/scaffolder-backend-module-utils": "3.0.0",
|
||||
"@backstage/backend-common": "^0.19.0",
|
||||
"@backstage/backend-tasks": "^0.5.2",
|
||||
"@backstage/catalog-client": "^1.4.1",
|
||||
"@backstage/catalog-model": "^1.3.0",
|
||||
"@backstage/config": "^1.0.7",
|
||||
"@backstage/integration": "^1.4.5",
|
||||
"@backstage/plugin-app-backend": "^0.3.45",
|
||||
"@backstage/plugin-auth-backend": "^0.18.3",
|
||||
"@backstage/plugin-auth-node": "^0.2.14",
|
||||
"@backstage/plugin-catalog-backend": "^1.9.1",
|
||||
"@backstage/plugin-kubernetes-backend": "^0.11.2",
|
||||
"@backstage/plugin-permission-common": "^0.7.5",
|
||||
"@backstage/plugin-permission-node": "^0.7.8",
|
||||
"@backstage/plugin-proxy-backend": "^0.2.39",
|
||||
"@backstage/plugin-scaffolder-backend": "^1.14.0",
|
||||
"@backstage/plugin-scaffolder-node": "^0.1.3",
|
||||
"@backstage/plugin-search-backend": "^1.3.1",
|
||||
"@backstage/plugin-search-backend-module-pg": "^0.5.6",
|
||||
"@backstage/plugin-search-backend-node": "^1.2.1",
|
||||
"@backstage/plugin-techdocs-backend": "^1.6.2",
|
||||
"@backstage/types": "^1.1.0",
|
||||
"@kubernetes/client-node": "^0.18.1",
|
||||
"@roadiehq/scaffolder-backend-module-utils": "^1.8.7",
|
||||
"app": "link:../app",
|
||||
"better-sqlite3": "^9.0.0",
|
||||
"better-sqlite3": "^8.0.0",
|
||||
"dockerode": "^3.3.1",
|
||||
"express": "^4.17.1",
|
||||
"express-promise-router": "^4.1.0",
|
||||
"fs-extra": "~11.2.0",
|
||||
"node-gyp": "^9.0.0",
|
||||
"pg": "^8.11.3",
|
||||
"winston": "^3.2.1"
|
||||
"pg": "^8.3.0",
|
||||
"winston": "^3.2.1",
|
||||
"yaml": "^2.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@backstage/cli": "^0.30.0",
|
||||
"@backstage/cli": "^0.22.7",
|
||||
"@types/dockerode": "^3.3.0",
|
||||
"@types/express": "^4.17.6",
|
||||
"@types/express-serve-static-core": "^4.17.5",
|
||||
"@types/fs-extra": "^11.0.4",
|
||||
"@types/luxon": "^2.0.4"
|
||||
},
|
||||
"files": [
|
||||
|
|
|
@ -1,45 +1,121 @@
|
|||
import { createBackend } from '@backstage/backend-defaults';
|
||||
import { cnoeScaffolderActions } from './plugins/scaffolder';
|
||||
import { authModuleKeycloakOIDCProvider } from './plugins/auth';
|
||||
/*
|
||||
* Hi!
|
||||
*
|
||||
* Note that this is an EXAMPLE Backstage backend. Please check the README.
|
||||
*
|
||||
* Happy hacking!
|
||||
*/
|
||||
|
||||
const backend = createBackend();
|
||||
import Router from 'express-promise-router';
|
||||
import {
|
||||
createServiceBuilder,
|
||||
loadBackendConfig,
|
||||
getRootLogger,
|
||||
useHotMemoize,
|
||||
notFoundHandler,
|
||||
CacheManager,
|
||||
DatabaseManager,
|
||||
SingleHostDiscovery,
|
||||
UrlReaders,
|
||||
ServerTokenManager,
|
||||
} from '@backstage/backend-common';
|
||||
import { TaskScheduler } from '@backstage/backend-tasks';
|
||||
import { Config } from '@backstage/config';
|
||||
import app from './plugins/app';
|
||||
import auth from './plugins/auth';
|
||||
import catalog from './plugins/catalog';
|
||||
import scaffolder from './plugins/scaffolder';
|
||||
import proxy from './plugins/proxy';
|
||||
import techdocs from './plugins/techdocs';
|
||||
import search from './plugins/search';
|
||||
import { PluginEnvironment } from './types';
|
||||
import { ServerPermissionClient } from '@backstage/plugin-permission-node';
|
||||
import { DefaultIdentityClient } from '@backstage/plugin-auth-node';
|
||||
|
||||
// core plugins
|
||||
backend.add(import('@backstage/plugin-app-backend'));
|
||||
backend.add(import('@backstage/plugin-catalog-backend'));
|
||||
backend.add(import('@backstage/plugin-proxy-backend'));
|
||||
backend.add(import('@backstage/plugin-techdocs-backend/alpha'));
|
||||
import kubernetes from './plugins/kubernetes';
|
||||
|
||||
// auth plugins
|
||||
backend.add(import('@backstage/plugin-auth-backend'));
|
||||
backend.add(import('@backstage/plugin-auth-backend-module-guest-provider'));
|
||||
function makeCreateEnv(config: Config) {
|
||||
const root = getRootLogger();
|
||||
const reader = UrlReaders.default({ logger: root, config });
|
||||
const discovery = SingleHostDiscovery.fromConfig(config);
|
||||
const cacheManager = CacheManager.fromConfig(config);
|
||||
const databaseManager = DatabaseManager.fromConfig(config, { logger: root });
|
||||
const tokenManager = ServerTokenManager.noop();
|
||||
const taskScheduler = TaskScheduler.fromConfig(config);
|
||||
|
||||
// scaffolder plugins
|
||||
backend.add(import('@backstage/plugin-scaffolder-backend/alpha'));
|
||||
backend.add(
|
||||
import('@backstage/plugin-catalog-backend-module-scaffolder-entity-model'),
|
||||
);
|
||||
backend.add(import('@backstage/plugin-scaffolder-backend-module-github'));
|
||||
const identity = DefaultIdentityClient.create({
|
||||
discovery,
|
||||
});
|
||||
const permissions = ServerPermissionClient.fromConfig(config, {
|
||||
discovery,
|
||||
tokenManager,
|
||||
});
|
||||
|
||||
// search plugins
|
||||
backend.add(import('@backstage/plugin-search-backend/alpha'));
|
||||
root.info(`Created UrlReader ${reader}`);
|
||||
|
||||
backend.add(import('@backstage/plugin-search-backend-module-catalog'));
|
||||
backend.add(import('@backstage/plugin-search-backend-module-techdocs/alpha'));
|
||||
return (plugin: string): PluginEnvironment => {
|
||||
const logger = root.child({ type: 'plugin', plugin });
|
||||
const database = databaseManager.forPlugin(plugin);
|
||||
const cache = cacheManager.forPlugin(plugin);
|
||||
const scheduler = taskScheduler.forPlugin(plugin);
|
||||
return {
|
||||
logger,
|
||||
database,
|
||||
cache,
|
||||
config,
|
||||
reader,
|
||||
discovery,
|
||||
tokenManager,
|
||||
scheduler,
|
||||
permissions,
|
||||
identity,
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
// other @backstage plugins
|
||||
backend.add(import('@backstage/plugin-kubernetes-backend'));
|
||||
async function main() {
|
||||
const config = await loadBackendConfig({
|
||||
argv: process.argv,
|
||||
logger: getRootLogger(),
|
||||
});
|
||||
const createEnv = makeCreateEnv(config);
|
||||
|
||||
// roadie plugins
|
||||
backend.add(import('@roadiehq/scaffolder-backend-module-utils/new-backend'));
|
||||
backend.add(import('./plugins/argocd_index'));
|
||||
const catalogEnv = useHotMemoize(module, () => createEnv('catalog'));
|
||||
const scaffolderEnv = useHotMemoize(module, () => createEnv('scaffolder'));
|
||||
const authEnv = useHotMemoize(module, () => createEnv('auth'));
|
||||
const proxyEnv = useHotMemoize(module, () => createEnv('proxy'));
|
||||
const techdocsEnv = useHotMemoize(module, () => createEnv('techdocs'));
|
||||
const searchEnv = useHotMemoize(module, () => createEnv('search'));
|
||||
const appEnv = useHotMemoize(module, () => createEnv('app'));
|
||||
|
||||
backend.add(
|
||||
import('@roadiehq/scaffolder-backend-module-http-request/new-backend'),
|
||||
);
|
||||
const kubernetesEnv = useHotMemoize(module, () => createEnv('kubernetes'));
|
||||
|
||||
// cnoe plugins
|
||||
backend.add(authModuleKeycloakOIDCProvider);
|
||||
backend.add(cnoeScaffolderActions);
|
||||
const apiRouter = Router();
|
||||
apiRouter.use('/catalog', await catalog(catalogEnv));
|
||||
apiRouter.use('/scaffolder', await scaffolder(scaffolderEnv));
|
||||
apiRouter.use('/auth', await auth(authEnv));
|
||||
apiRouter.use('/techdocs', await techdocs(techdocsEnv));
|
||||
apiRouter.use('/proxy', await proxy(proxyEnv));
|
||||
apiRouter.use('/search', await search(searchEnv));
|
||||
|
||||
backend.start();
|
||||
apiRouter.use('/kubernetes', await kubernetes(kubernetesEnv));
|
||||
|
||||
// Add backends ABOVE this line; this 404 handler is the catch-all fallback
|
||||
apiRouter.use(notFoundHandler());
|
||||
|
||||
const service = createServiceBuilder(module)
|
||||
.loadConfig(config)
|
||||
.addRouter('/api', apiRouter)
|
||||
.addRouter('', await app(appEnv));
|
||||
|
||||
await service.start().catch(err => {
|
||||
console.log(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
module.hot?.accept();
|
||||
main().catch(error => {
|
||||
console.error('Backend failed to start up', error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
packages/backend/src/plugins/app.ts (new file, 14 lines)
@@ -0,0 +1,14 @@

import { createRouter } from '@backstage/plugin-app-backend';
import { Router } from 'express';
import { PluginEnvironment } from '../types';

export default async function createPlugin(
  env: PluginEnvironment,
): Promise<Router> {
  return await createRouter({
    logger: env.logger,
    config: env.config,
    database: env.database,
    appPackageName: 'app',
  });
}
|
@ -1,174 +0,0 @@
|
|||
import { Config } from '@backstage/config';
|
||||
import { createTemplateAction } from '@backstage/plugin-scaffolder-node';
|
||||
import { examples } from './gitea-actions';
|
||||
import { Logger } from 'winston';
|
||||
import { ArgoService } from '@roadiehq/backstage-plugin-argo-cd-backend';
|
||||
import { createRouter } from '@roadiehq/backstage-plugin-argo-cd-backend';
|
||||
//import { PluginEnvironment } from '../types';
|
||||
|
||||
|
||||
/*export default async function createPlugin({
|
||||
logger,
|
||||
config,
|
||||
}: PluginEnvironment) {
|
||||
return await createRouter({ logger, config });
|
||||
}*/
|
||||
|
||||
import { loggerToWinstonLogger } from '@backstage/backend-common';
|
||||
|
||||
import {
|
||||
coreServices,
|
||||
createBackendPlugin,
|
||||
} from '@backstage/backend-plugin-api';
|
||||
|
||||
export const argocdPlugin = createBackendPlugin({
|
||||
pluginId: 'argocd',
|
||||
register(env) {
|
||||
env.registerInit({
|
||||
deps: {
|
||||
logger: coreServices.logger,
|
||||
config: coreServices.rootConfig,
|
||||
reader: coreServices.urlReader,
|
||||
discovery: coreServices.discovery,
|
||||
auth: coreServices.auth,
|
||||
//tokenManager: coreServices.tokenManager,
|
||||
httpRouter: coreServices.httpRouter,
|
||||
},
|
||||
async init({
|
||||
logger,
|
||||
config,
|
||||
httpRouter,
|
||||
}) {
|
||||
httpRouter.use(
|
||||
await createRouter({
|
||||
logger: loggerToWinstonLogger(logger),
|
||||
config,
|
||||
}),
|
||||
);
|
||||
},
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
|
||||
export function createArgoCDApp(options: { config: Config; logger: Logger }) {
|
||||
const { config, logger } = options;
|
||||
|
||||
return createTemplateAction<{
|
||||
repoUrl: string;
|
||||
projectName?: string;
|
||||
appName: string;
|
||||
argoInstance: string;
|
||||
path: string;
|
||||
labelValue?: string;
|
||||
appNamespace: string;
|
||||
}>({
|
||||
id: 'cnoe:create-argocd-app',
|
||||
description: 'creates argocd app',
|
||||
examples,
|
||||
schema: {
|
||||
input: {
|
||||
type: 'object',
|
||||
required: [
|
||||
'repoUrl',
|
||||
'projectName',
|
||||
'appName',
|
||||
'argoInstance',
|
||||
'path',
|
||||
'appNamespace',
|
||||
],
|
||||
properties: {
|
||||
repoUrl: {
|
||||
title: 'Repository Location',
|
||||
type: 'string',
|
||||
},
|
||||
projectName: {
|
||||
title: 'name of the project in argocd',
|
||||
type: 'string',
|
||||
},
|
||||
appName: {
|
||||
title: 'application name in argocd',
|
||||
type: 'string',
|
||||
},
|
||||
appNamespace: {
|
||||
title: 'namespace for the application in argocd',
|
||||
type: 'string',
|
||||
},
|
||||
argoInstance: {
|
||||
title: 'backstage argocd instance name defined in app-config.yaml',
|
||||
type: 'string',
|
||||
},
|
||||
path: {
|
||||
title: 'argocd spec path',
|
||||
type: 'string',
|
||||
},
|
||||
labelValue: {
|
||||
title: 'for argocd plugin to locate this app',
|
||||
type: 'string',
|
||||
},
|
||||
},
|
||||
},
|
||||
output: {},
|
||||
},
|
||||
async handler(ctx) {
|
||||
const {
|
||||
repoUrl,
|
||||
projectName,
|
||||
appName,
|
||||
argoInstance,
|
||||
path,
|
||||
labelValue,
|
||||
appNamespace,
|
||||
} = ctx.input;
|
||||
|
||||
const argoUserName =
|
||||
config.getOptionalString('argocd.username') ?? 'argocdUsername';
|
||||
const argoPassword =
|
||||
config.getOptionalString('argocd.password') ?? 'argocdPassword';
|
||||
|
||||
const argoSvc = new ArgoService(
|
||||
argoUserName,
|
||||
argoPassword,
|
||||
config,
|
||||
logger,
|
||||
);
|
||||
|
||||
const argocdConfig = config
|
||||
.getConfigArray('argocd.appLocatorMethods')
|
||||
.filter(element => element.getString('type') === 'config')
|
||||
.reduce(
|
||||
(acc: Config[], argoApp: Config) =>
|
||||
acc.concat(argoApp.getConfigArray('instances')),
|
||||
[],
|
||||
)
|
||||
.map(instance => ({
|
||||
name: instance.getString('name'),
|
||||
url: instance.getString('url'),
|
||||
token: instance.getOptionalString('token'),
|
||||
username: instance.getOptionalString('username'),
|
||||
password: instance.getOptionalString('password'),
|
||||
}));
|
||||
const matchedArgoInstance = argocdConfig.find(
|
||||
argoHost => argoHost.name === argoInstance,
|
||||
);
|
||||
if (!matchedArgoInstance) {
|
||||
throw new Error(`Unable to find Argo instance named "${argoInstance}"`);
|
||||
}
|
||||
const token =
|
||||
matchedArgoInstance.token ||
|
||||
(await argoSvc.getArgoToken(matchedArgoInstance));
|
||||
|
||||
await argoSvc.createArgoApplication({
|
||||
baseUrl: matchedArgoInstance.url,
|
||||
argoToken: token,
|
||||
appName: appName,
|
||||
projectName: projectName ? projectName : appName,
|
||||
namespace: appNamespace,
|
||||
sourceRepo: repoUrl,
|
||||
sourcePath: path,
|
||||
labelValue: labelValue ? labelValue : appName,
|
||||
});
|
||||
},
|
||||
});
|
||||
}
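The handler above resolves its target instance from `argocd.appLocatorMethods` in the app config. A sketch of the config shape it expects, using an in-memory reader (instance name, URL, and credentials are made up):

```ts
import { ConfigReader } from '@backstage/config';

// Illustrative only: with this config, passing argoInstance: 'in-cluster'
// to cnoe:create-argocd-app would match the single instance below.
const config = new ConfigReader({
  argocd: {
    username: 'admin',
    password: 'changeme',
    appLocatorMethods: [
      {
        type: 'config',
        instances: [
          {
            name: 'in-cluster',
            url: 'https://argocd.example.com',
            // token is optional; when absent the action calls getArgoToken()
          },
        ],
      },
    ],
  },
});

console.log(config.getConfigArray('argocd.appLocatorMethods').length); // 1
```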
|
|
@ -1 +0,0 @@
|
|||
export { argocdPlugin as default } from './argocd';
|
|
@ -1,68 +1,45 @@
|
|||
import {
|
||||
createRouter,
|
||||
providers,
|
||||
defaultAuthProviderFactories,
|
||||
} from '@backstage/plugin-auth-backend';
|
||||
import { Router } from 'express';
|
||||
import { PluginEnvironment } from '../types';
|
||||
import {
|
||||
DEFAULT_NAMESPACE,
|
||||
stringifyEntityRef,
|
||||
} from '@backstage/catalog-model';
|
||||
import { JsonArray } from '@backstage/types';
|
||||
import { createBackendModule } from '@backstage/backend-plugin-api';
|
||||
import {
|
||||
authProvidersExtensionPoint,
|
||||
createOAuthProviderFactory,
|
||||
OAuthAuthenticatorResult,
|
||||
} from '@backstage/plugin-auth-node';
|
||||
import {
|
||||
oidcAuthenticator,
|
||||
OidcAuthResult,
|
||||
} from '@backstage/plugin-auth-backend-module-oidc-provider';
|
||||
|
||||
export const authModuleKeycloakOIDCProvider = createBackendModule({
|
||||
pluginId: 'auth',
|
||||
moduleId: 'keycloak-oidc',
|
||||
register(reg) {
|
||||
reg.registerInit({
|
||||
deps: {
|
||||
providers: authProvidersExtensionPoint,
|
||||
},
|
||||
async init({ providers }) {
|
||||
providers.registerProvider({
|
||||
providerId: 'keycloak-oidc',
|
||||
factory: createOAuthProviderFactory({
|
||||
authenticator: oidcAuthenticator,
|
||||
profileTransform: async (
|
||||
input: OAuthAuthenticatorResult<OidcAuthResult>,
|
||||
) => ({
|
||||
profile: {
|
||||
email: input.fullProfile.userinfo.email,
|
||||
picture: input.fullProfile.userinfo.picture,
|
||||
displayName: input.fullProfile.userinfo.name,
|
||||
export default async function createPlugin(
|
||||
env: PluginEnvironment,
|
||||
): Promise<Router> {
|
||||
return await createRouter({
|
||||
logger: env.logger,
|
||||
config: env.config,
|
||||
database: env.database,
|
||||
discovery: env.discovery,
|
||||
tokenManager: env.tokenManager,
|
||||
providerFactories: {
|
||||
...defaultAuthProviderFactories,
|
||||
'keycloak-oidc': providers.oidc.create({
|
||||
signIn: {
|
||||
resolver(info, ctx) {
|
||||
const userRef = stringifyEntityRef({
|
||||
kind: 'User',
|
||||
name: info.result.userinfo.sub,
|
||||
namespace: DEFAULT_NAMESPACE,
|
||||
});
|
||||
return ctx.issueToken({
|
||||
claims: {
|
||||
sub: userRef,
|
||||
ent: [userRef],
|
||||
groups: (info.result.userinfo.groups as JsonArray) || [],
|
||||
},
|
||||
}),
|
||||
async signInResolver(info, ctx) {
|
||||
const { profile } = info;
|
||||
if (!profile.displayName) {
|
||||
throw new Error(
|
||||
'Login failed, user profile does not contain a valid name',
|
||||
);
|
||||
}
|
||||
// should use users from catalog
|
||||
const userRef = stringifyEntityRef({
|
||||
kind: 'User',
|
||||
name: info.profile.displayName!,
|
||||
namespace: DEFAULT_NAMESPACE,
|
||||
});
|
||||
|
||||
return ctx.issueToken({
|
||||
claims: {
|
||||
sub: userRef,
|
||||
ent: [userRef],
|
||||
groups:
|
||||
(info.result.fullProfile.userinfo.groups as JsonArray) ||
|
||||
[],
|
||||
},
|
||||
});
|
||||
},
|
||||
}),
|
||||
});
|
||||
},
|
||||
});
|
||||
},
|
||||
});
|
||||
});
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
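The resolver above issues a token straight from the Keycloak profile, and the inline comment notes it "should use users from catalog". A possible catalog-backed variant, sketched under the assumption that the Keycloak `preferred_username` matches the name of a `User` entity (this is not what the repository currently does):

```ts
import { createOAuthProviderFactory } from '@backstage/plugin-auth-node';
import { oidcAuthenticator } from '@backstage/plugin-auth-backend-module-oidc-provider';

// Sketch: resolve the signed-in user against the catalog instead of issuing
// a token from the raw OIDC profile. Assumes matching User entities exist.
export const keycloakCatalogFactory = createOAuthProviderFactory({
  authenticator: oidcAuthenticator,
  async signInResolver(info, ctx) {
    const name = info.result.fullProfile.userinfo.preferred_username;
    if (!name) {
      throw new Error('OIDC response did not contain preferred_username');
    }
    return ctx.signInWithCatalogUser({ entityRef: { kind: 'User', name } });
  },
});
```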
packages/backend/src/plugins/catalog.ts (new file, 14 lines)
@@ -0,0 +1,14 @@

import { CatalogBuilder } from '@backstage/plugin-catalog-backend';
import { ScaffolderEntitiesProcessor } from '@backstage/plugin-scaffolder-backend';
import { Router } from 'express';
import { PluginEnvironment } from '../types';

export default async function createPlugin(
  env: PluginEnvironment,
): Promise<Router> {
  const builder = await CatalogBuilder.create(env);
  builder.addProcessor(new ScaffolderEntitiesProcessor());
  const { processingEngine, router } = await builder.build();
  await processingEngine.start();
  return router;
}
|
@ -1,600 +0,0 @@
|
|||
import { InputError } from '@backstage/errors';
|
||||
import { Config } from '@backstage/config';
|
||||
import {
|
||||
getGiteaRequestOptions,
|
||||
GiteaIntegrationConfig,
|
||||
ScmIntegrationRegistry,
|
||||
ScmIntegrations,
|
||||
} from '@backstage/integration';
|
||||
import {
|
||||
createTemplateAction,
|
||||
getRepoSourceDirectory,
|
||||
initRepoAndPush,
|
||||
TemplateExample,
|
||||
} from '@backstage/plugin-scaffolder-node';
|
||||
import crypto from 'crypto';
|
||||
import yaml from 'yaml';
|
||||
|
||||
export const examples: TemplateExample[] = [
|
||||
{
|
||||
description:
|
||||
'Initializes a Gitea repository using the content of the workspace and publish it to Gitea with default configuration.',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
description: 'Initializes a Gitea repository with a description.',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
description: 'Initialize a gitea repository',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
description:
|
||||
'Initializes a Gitea repository with a default Branch, if not set defaults to main',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
defaultBranch: 'main',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
description:
|
||||
'Initializes a Gitea repository with an initial commit message, if not set defaults to initial commit',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
gitCommitMessage: 'Initial Commit Message',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
description:
|
||||
'Initializes a Gitea repository with a repo Author Name, if not set defaults to Scaffolder',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
gitAuthorName: 'John Doe',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
description: 'Initializes a Gitea repository with a repo Author Email',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
gitAuthorEmail: 'johndoe@email.com',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
description:
|
||||
'Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
sourcePath: 'repository/',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
description: 'Initializes a Gitea repository with all properties being set',
|
||||
example: yaml.stringify({
|
||||
steps: [
|
||||
{
|
||||
id: 'publish',
|
||||
action: 'publish:gitea',
|
||||
name: 'Publish to Gitea',
|
||||
input: {
|
||||
repoUrl: 'gitea.com?repo=repo&owner=owner',
|
||||
description: 'Initialize a gitea repository',
|
||||
defaultBranch: 'staging',
|
||||
gitCommitMessage: 'Initial Commit Message',
|
||||
gitAuthorName: 'John Doe',
|
||||
gitAuthorEmail: 'johndoe@email.com',
|
||||
sourcePath: 'repository/',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
];
|
||||
|
||||
const parseRepoUrl = (
|
||||
repoUrl: string,
|
||||
integrations: ScmIntegrationRegistry,
|
||||
): {
|
||||
repo: string;
|
||||
host: string;
|
||||
owner?: string;
|
||||
organization?: string;
|
||||
workspace?: string;
|
||||
project?: string;
|
||||
} => {
|
||||
let parsed;
|
||||
try {
|
||||
parsed = new URL(`https://${repoUrl}`);
|
||||
} catch (error) {
|
||||
throw new InputError(
|
||||
`Invalid repo URL passed to publisher, got ${repoUrl}, ${error}`,
|
||||
);
|
||||
}
|
||||
const host = parsed.host;
|
||||
const owner = parsed.searchParams.get('owner') ?? undefined;
|
||||
const organization = parsed.searchParams.get('organization') ?? undefined;
|
||||
const workspace = parsed.searchParams.get('workspace') ?? undefined;
|
||||
const project = parsed.searchParams.get('project') ?? undefined;
|
||||
|
||||
const type = integrations.byHost(host)?.type;
|
||||
|
||||
if (!type) {
|
||||
throw new InputError(
|
||||
`No matching integration configuration for host ${host}, please check your integrations config`,
|
||||
);
|
||||
}
|
||||
|
||||
const repo: string = parsed.searchParams.get('repo')!;
|
||||
switch (type) {
|
||||
case 'bitbucket': {
|
||||
if (host === 'www.bitbucket.org') {
|
||||
checkRequiredParams(parsed, 'workspace');
|
||||
}
|
||||
checkRequiredParams(parsed, 'project', 'repo');
|
||||
break;
|
||||
}
|
||||
case 'gitlab': {
|
||||
// project is the projectID, and if defined, owner and repo won't be needed.
|
||||
if (!project) {
|
||||
checkRequiredParams(parsed, 'owner', 'repo');
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'gitea': {
|
||||
checkRequiredParams(parsed, 'repo');
|
||||
break;
|
||||
}
|
||||
case 'gerrit': {
|
||||
checkRequiredParams(parsed, 'repo');
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
checkRequiredParams(parsed, 'repo', 'owner');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return { host, owner, repo, organization, workspace, project };
|
||||
};
|
||||
|
||||
function checkRequiredParams(repoUrl: URL, ...params: string[]) {
|
||||
for (let i = 0; i < params.length; i++) {
|
||||
if (!repoUrl.searchParams.get(params[i])) {
|
||||
throw new InputError(
|
||||
`Invalid repo URL passed to publisher: ${repoUrl.toString()}, missing ${
|
||||
params[i]
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
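For reference, given the `repoUrl` query-string format used throughout the examples above, `parseRepoUrl` returns a plain object; a small illustration with a hypothetical Gitea integration (host and credentials are made up):

```ts
import { ConfigReader } from '@backstage/config';
import { ScmIntegrations } from '@backstage/integration';

// Illustration only: the integration registry parseRepoUrl consults.
const integrations = ScmIntegrations.fromConfig(
  new ConfigReader({
    integrations: {
      gitea: [{ host: 'gitea.example.com', username: 'bot', password: 'secret' }],
    },
  }),
);

// parseRepoUrl('gitea.example.com?repo=my-service&owner=platform', integrations)
// => { host: 'gitea.example.com', owner: 'platform', repo: 'my-service',
//      organization: undefined, workspace: undefined, project: undefined }
```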
|
||||
// const checkGiteaContentUrl = async (
|
||||
// config: GiteaIntegrationConfig,
|
||||
// options: {
|
||||
// owner?: string;
|
||||
// repo: string;
|
||||
// defaultBranch?: string;
|
||||
// },
|
||||
// ): Promise<Response> => {
|
||||
// const { owner, repo, defaultBranch } = options;
|
||||
// let response: Response;
|
||||
// const getOptions: RequestInit = {
|
||||
// method: 'GET',
|
||||
// };
|
||||
//
|
||||
// try {
|
||||
// response = await fetch(
|
||||
// `${config.baseUrl}/${owner}/${repo}/src/branch/${defaultBranch}`,
|
||||
// getOptions,
|
||||
// );
|
||||
// } catch (e) {
|
||||
// throw new Error(
|
||||
// `Unable to get the repository: ${owner}/${repo} metadata , ${e}`,
|
||||
// );
|
||||
// }
|
||||
// return response;
|
||||
// };
|
||||
|
||||
const checkGiteaOrg = async (
|
||||
config: GiteaIntegrationConfig,
|
||||
options: {
|
||||
owner: string;
|
||||
},
|
||||
): Promise<void> => {
|
||||
const { owner } = options;
|
||||
let response: Response;
|
||||
// check first if the org = owner exists
|
||||
const getOptions: RequestInit = {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
...getGiteaRequestOptions(config).headers,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
try {
|
||||
response = await fetch(
|
||||
`${config.baseUrl}/api/v1/orgs/${owner}`,
|
||||
getOptions,
|
||||
);
|
||||
} catch (e) {
|
||||
throw new Error(`Unable to get the Organization: ${owner}, ${e}`);
|
||||
}
|
||||
if (response.status !== 200) {
|
||||
throw new Error(
|
||||
`Organization ${owner} does not exist. Please create it first!`,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const createGiteaProject = async (
|
||||
config: GiteaIntegrationConfig,
|
||||
options: {
|
||||
projectName: string;
|
||||
owner?: string;
|
||||
description: string;
|
||||
},
|
||||
): Promise<void> => {
|
||||
const { projectName, description, owner } = options;
|
||||
|
||||
/*
|
||||
Several options exist to create a repository using either the user or organisation
|
||||
User: https://gitea.com/api/swagger#/user/createCurrentUserRepo
|
||||
Api: URL/api/v1/user/repos
|
||||
Remark: The user is the username defined part of the backstage integration config for the gitea URL !
|
||||
|
||||
Org: https://gitea.com/api/swagger#/organization/createOrgRepo
|
||||
Api: URL/api/v1/orgs/${org_owner}/repos
|
||||
This is the default scenario that we support currently
|
||||
*/
|
||||
let response: Response;
|
||||
|
||||
const postOptions: RequestInit = {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
name: projectName,
|
||||
description,
|
||||
}),
|
||||
headers: {
|
||||
...getGiteaRequestOptions(config).headers,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
if (owner) {
|
||||
try {
|
||||
response = await fetch(
|
||||
`${config.baseUrl}/api/v1/orgs/${owner}/repos`,
|
||||
postOptions,
|
||||
);
|
||||
} catch (e) {
|
||||
throw new Error(`Unable to create repository, ${e}`);
|
||||
}
|
||||
if (response.status !== 201) {
|
||||
throw new Error(
|
||||
`Unable to create repository, ${response.status} ${
|
||||
response.statusText
|
||||
}, ${await response.text()}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
response = await fetch(
|
||||
`${config.baseUrl}/api/v1/user/repos`,
|
||||
postOptions,
|
||||
);
|
||||
} catch (e) {
|
||||
throw new Error(`Unable to create repository, ${e}`);
|
||||
}
|
||||
if (response.status !== 201) {
|
||||
throw new Error(
|
||||
`Unable to create repository, ${response.status} ${
|
||||
response.statusText
|
||||
}, ${await response.text()}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const generateCommitMessage = (
|
||||
config: Config,
|
||||
commitSubject?: string,
|
||||
): string => {
|
||||
const changeId = crypto.randomBytes(20).toString('hex');
|
||||
const msg = `${
|
||||
config.getOptionalString('scaffolder.defaultCommitMessage') || commitSubject
|
||||
}\n\nChange-Id: I${changeId}`;
|
||||
return msg;
|
||||
};
|
||||
|
||||
// async function checkDurationLimit(fn: () => void, timeLimit: number): Promise<boolean> {
|
||||
//
|
||||
// const startTime = process.hrtime();
|
||||
//
|
||||
// // Call the function
|
||||
// await fn();
|
||||
//
|
||||
// const endTime = process.hrtime(startTime);
|
||||
// const durationInMs = endTime[0] * 1000 + endTime[1] / 1e6;
|
||||
//
|
||||
// // Check if the duration exceeds the time limit
|
||||
// return durationInMs <= timeLimit;
|
||||
// }
|
||||
//
|
||||
// async function checkAvailabilityGiteaRepository(
|
||||
// integrationConfig: GiteaIntegrationConfig,
|
||||
// options: {
|
||||
// owner?: string;
|
||||
// repo: string;
|
||||
// defaultBranch: string;
|
||||
// ctx: ActionContext<any>;
|
||||
// },
|
||||
// ) {
|
||||
// const { owner, repo, defaultBranch, ctx } = options;
|
||||
// const sleep = (ms: number | undefined) => new Promise(r => setTimeout(r, ms));
|
||||
// let response: Response;
|
||||
//
|
||||
// const p = new Promise<void>((resolve, reject) => {
|
||||
// setTimeout(async () => {
|
||||
// response = await checkGiteaContentUrl(integrationConfig, {
|
||||
// owner,
|
||||
// repo,
|
||||
// defaultBranch,
|
||||
// });
|
||||
//
|
||||
// while (response.status !== 200) {
|
||||
// if (ctx.signal?.aborted) return;
|
||||
// await sleep(1000);
|
||||
// response = await checkGiteaContentUrl(integrationConfig, {
|
||||
// owner,
|
||||
// repo,
|
||||
// defaultBranch,
|
||||
// });
|
||||
// }
|
||||
// resolve()
|
||||
// },
|
||||
// 5000
|
||||
// )
|
||||
// })
|
||||
// return p
|
||||
//
|
||||
// }
|
||||
|
||||
/**
|
||||
* Creates a new action that initializes a git repository using the content of the workspace.
|
||||
* and publishes it to a Gitea instance.
|
||||
* @public
|
||||
*/
|
||||
export function createPublishGiteaAction(options: {
|
||||
integrations: ScmIntegrations;
|
||||
config: Config;
|
||||
}) {
|
||||
const { integrations, config } = options;
|
||||
|
||||
return createTemplateAction<{
|
||||
repoUrl: string;
|
||||
description: string;
|
||||
defaultBranch?: string;
|
||||
gitCommitMessage?: string;
|
||||
gitAuthorName?: string;
|
||||
gitAuthorEmail?: string;
|
||||
sourcePath?: string;
|
||||
}>({
|
||||
id: 'publish:gitea',
|
||||
description:
|
||||
'Initializes a git repository using the content of the workspace, and publishes it to Gitea.',
|
||||
examples,
|
||||
schema: {
|
||||
input: {
|
||||
type: 'object',
|
||||
required: ['repoUrl'],
|
||||
properties: {
|
||||
repoUrl: {
|
||||
title: 'Repository Location',
|
||||
type: 'string',
|
||||
},
|
||||
description: {
|
||||
title: 'Repository Description',
|
||||
type: 'string',
|
||||
},
|
||||
defaultBranch: {
|
||||
title: 'Default Branch',
|
||||
type: 'string',
|
||||
description: `Sets the default branch on the repository. The default value is 'main'`,
|
||||
},
|
||||
gitCommitMessage: {
|
||||
title: 'Git Commit Message',
|
||||
type: 'string',
|
||||
description: `Sets the commit message on the repository. The default value is 'initial commit'`,
|
||||
},
|
||||
gitAuthorName: {
|
||||
title: 'Default Author Name',
|
||||
type: 'string',
|
||||
description: `Sets the default author name for the commit. The default value is 'Scaffolder'`,
|
||||
},
|
||||
gitAuthorEmail: {
|
||||
title: 'Default Author Email',
|
||||
type: 'string',
|
||||
description: `Sets the default author email for the commit.`,
|
||||
},
|
||||
sourcePath: {
|
||||
title: 'Source Path',
|
||||
type: 'string',
|
||||
description: `Path within the workspace that will be used as the repository root. If omitted, the entire workspace will be published as the repository.`,
|
||||
},
|
||||
},
|
||||
},
|
||||
output: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
remoteUrl: {
|
||||
title: 'A URL to the repository with the provider',
|
||||
type: 'string',
|
||||
},
|
||||
repoContentsUrl: {
|
||||
title: 'A URL to the root of the repository',
|
||||
type: 'string',
|
||||
},
|
||||
commitHash: {
|
||||
title: 'The git commit hash of the initial commit',
|
||||
type: 'string',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
async handler(ctx) {
|
||||
const {
|
||||
repoUrl,
|
||||
description,
|
||||
defaultBranch = 'main',
|
||||
gitAuthorName,
|
||||
gitAuthorEmail,
|
||||
gitCommitMessage = 'initial commit',
|
||||
sourcePath,
|
||||
} = ctx.input;
|
||||
const { repo, host, owner } = parseRepoUrl(repoUrl, integrations);
|
||||
const integrationConfig = integrations.gitea.byHost(host);
|
||||
if (!integrationConfig) {
|
||||
throw new InputError(
|
||||
`No matching integration configuration for host ${host}, please check your integrations config`,
|
||||
);
|
||||
}
|
||||
const { username, password } = integrationConfig.config;
|
||||
|
||||
if (!username || !password) {
|
||||
throw new Error(`Credentials for the gitea host ${host} are required.`);
|
||||
}
|
||||
|
||||
// check if the org exists within the gitea server
|
||||
if (owner && owner !== username) {
|
||||
await checkGiteaOrg(integrationConfig.config, { owner });
|
||||
}
|
||||
|
||||
await createGiteaProject(integrationConfig.config, {
|
||||
description,
|
||||
owner: owner,
|
||||
projectName: repo,
|
||||
});
|
||||
|
||||
const auth = {
|
||||
username: username,
|
||||
password: password,
|
||||
};
|
||||
const gitAuthorInfo = {
|
||||
name: gitAuthorName
|
||||
? gitAuthorName
|
||||
: config.getOptionalString('scaffolder.defaultAuthor.name'),
|
||||
email: gitAuthorEmail
|
||||
? gitAuthorEmail
|
||||
: config.getOptionalString('scaffolder.defaultAuthor.email'),
|
||||
};
|
||||
// The owner to be used should be either the org name or user authenticated with the gitea server
|
||||
const repoOwner = owner ? owner : username;
|
||||
const remoteUrl = `${integrationConfig.config.baseUrl}/${repoOwner}/${repo}.git`;
|
||||
const commitResult = await initRepoAndPush({
|
||||
dir: getRepoSourceDirectory(ctx.workspacePath, sourcePath),
|
||||
remoteUrl,
|
||||
auth,
|
||||
defaultBranch,
|
||||
logger: ctx.logger,
|
||||
commitMessage: generateCommitMessage(config, gitCommitMessage),
|
||||
gitAuthorInfo,
|
||||
});
|
||||
|
||||
// Check if the gitea repo URL is available before to exit
|
||||
const operationTimeLimit = 5000; // 5 seconds
|
||||
const sleep = (ms: number | undefined) =>
|
||||
new Promise(r => setTimeout(r, ms));
|
||||
await sleep(operationTimeLimit);
|
||||
// await checkAvailabilityGiteaRepository(
|
||||
// integrationConfig.config, {
|
||||
// repoOwner,
|
||||
// repo,
|
||||
// defaultBranch,
|
||||
// ctx,
|
||||
// }
|
||||
// )
|
||||
// const checkDuration = await checkDurationLimit(
|
||||
// async () =>
|
||||
// await checkAvailabilityGiteaRepository(integrationConfig.config, {
|
||||
// repoOwner,
|
||||
// repo,
|
||||
// defaultBranch,
|
||||
// ctx,
|
||||
// }),
|
||||
// operationTimeLimit,
|
||||
// );
|
||||
//
|
||||
// if (!checkDuration) {
|
||||
// console.log('Operation exceeded the time limit.');
|
||||
// }
|
||||
|
||||
const repoContentsUrl = `${integrationConfig.config.baseUrl}/${repoOwner}/${repo}/src/branch/${defaultBranch}/`;
|
||||
ctx.output('remoteUrl', remoteUrl);
|
||||
ctx.output('commitHash', commitResult?.commitHash);
|
||||
ctx.output('repoContentsUrl', repoContentsUrl);
|
||||
},
|
||||
});
|
||||
}
|
|
@ -1,255 +0,0 @@
|
|||
import {
|
||||
createTemplateAction,
|
||||
executeShellCommand,
|
||||
} from '@backstage/plugin-scaffolder-node';
|
||||
import { dumpYaml } from '@kubernetes/client-node';
|
||||
import yaml from 'js-yaml';
|
||||
import { Config } from '@backstage/config';
|
||||
import { resolveSafeChildPath } from '@backstage/backend-common';
|
||||
import fs from 'fs-extra';
|
||||
|
||||
interface Cluster {
|
||||
name: string;
|
||||
cluster: {
|
||||
server: string;
|
||||
'insecure-skip-tls-verify': boolean;
|
||||
'certificate-authority-data'?: string;
|
||||
'certificate-authority'?: string;
|
||||
};
|
||||
}
|
||||
interface Context {
|
||||
name: string;
|
||||
context: {
|
||||
cluster: string;
|
||||
user: string;
|
||||
};
|
||||
}
|
||||
interface User {
|
||||
name: string;
|
||||
user: {
|
||||
token?: string;
|
||||
};
|
||||
}
|
||||
interface ConfFile {
|
||||
apiVersion: string;
|
||||
kind: string;
|
||||
'current-context': string;
|
||||
contexts: Context[];
|
||||
clusters: Cluster[];
|
||||
users: User[];
|
||||
}
|
||||
export const createKubernetesApply = (config: Config) => {
|
||||
return createTemplateAction<{
|
||||
manifestString?: string;
|
||||
manifestObject?: any;
|
||||
manifestPath?: string;
|
||||
namespaced: boolean;
|
||||
clusterName?: string;
|
||||
}>({
|
||||
id: 'cnoe:kubernetes:apply',
|
||||
schema: {
|
||||
input: {
|
||||
type: 'object',
|
||||
required: ['namespaced'],
|
||||
properties: {
|
||||
manifestString: {
|
||||
type: 'string',
|
||||
title: 'Manifest',
|
||||
description:
|
||||
'The manifest to apply in the cluster. Must be a string',
|
||||
},
|
||||
manifestObject: {
|
||||
type: 'object',
|
||||
title: 'Manifest',
|
||||
description:
|
||||
'The manifest to apply in the cluster. Must be an object',
|
||||
},
|
||||
manifestPath: {
|
||||
type: 'string',
|
||||
title: 'Path to the manifest file',
|
||||
description: 'The path to the manifest file.',
|
||||
},
|
||||
namespaced: {
|
||||
type: 'boolean',
|
||||
title: 'Namespaced',
|
||||
description: 'Whether the API is namespaced or not',
|
||||
},
|
||||
clusterName: {
|
||||
type: 'string',
|
||||
title: 'Cluster Name',
|
||||
description: 'The name of the cluster to apply this',
|
||||
},
|
||||
},
|
||||
},
|
||||
output: {
|
||||
type: 'object',
|
||||
title: 'Returned object',
|
||||
description:
|
||||
'The object returned by Kubernetes by performing this operation',
|
||||
},
|
||||
},
|
||||
async handler(ctx) {
|
||||
let manifestPath = resolveSafeChildPath(
|
||||
ctx.workspacePath,
|
||||
'to-be-applied.yaml',
|
||||
);
|
||||
if (ctx.input.manifestString) {
|
||||
fs.writeFileSync(manifestPath, ctx.input.manifestString, {
|
||||
encoding: 'utf8',
|
||||
mode: '600',
|
||||
});
|
||||
} else if (ctx.input.manifestObject) {
|
||||
fs.writeFileSync(manifestPath, yaml.dump(ctx.input.manifestObject), {
|
||||
encoding: 'utf8',
|
||||
mode: '600',
|
||||
});
|
||||
} else {
|
||||
const filePath = resolveSafeChildPath(
|
||||
ctx.workspacePath,
|
||||
ctx.input.manifestPath!,
|
||||
);
|
||||
manifestPath = filePath;
|
||||
}
|
||||
const fileContent = fs.readFileSync(manifestPath, 'utf8');
|
||||
const objList: any[] = yaml.loadAll(fileContent);
|
||||
|
||||
if (ctx.input.clusterName) {
|
||||
// Supports SA token authentication only
|
||||
const targetCluster = getClusterConfig(ctx.input.clusterName!, config);
|
||||
const confFile: ConfFile = {
|
||||
apiVersion: 'v1',
|
||||
kind: 'Config',
|
||||
'current-context': ctx.input.clusterName,
|
||||
contexts: [
|
||||
{
|
||||
name: ctx.input.clusterName,
|
||||
context: {
|
||||
cluster: ctx.input.clusterName,
|
||||
user: ctx.input.clusterName,
|
||||
},
|
||||
},
|
||||
],
|
||||
clusters: [
|
||||
{
|
||||
name: ctx.input.clusterName,
|
||||
cluster: {
|
||||
server: targetCluster.getString('url'),
|
||||
'insecure-skip-tls-verify':
|
||||
!!targetCluster.getOptionalBoolean('skipTLSVerify'),
|
||||
},
|
||||
},
|
||||
],
|
||||
users: [
|
||||
{
|
||||
name: ctx.input.clusterName,
|
||||
user: {
|
||||
token: targetCluster.getString('serviceAccountToken'),
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
if (!confFile.clusters[0].cluster['insecure-skip-tls-verify']) {
|
||||
let caDataRaw = targetCluster.getOptionalString('caData');
|
||||
if (caDataRaw?.startsWith('-----BEGIN CERTIFICATE-----')) {
|
||||
caDataRaw = Buffer.from(
|
||||
targetCluster.getString('caData'),
|
||||
'utf8',
|
||||
).toString('base64');
|
||||
}
|
||||
confFile.clusters[0].cluster['certificate-authority-data'] =
|
||||
caDataRaw;
|
||||
if (
|
||||
targetCluster.getOptionalString('caFile') &&
|
||||
!(
|
||||
targetCluster.getOptionalString('caFile')?.length === 0 ||
|
||||
targetCluster.getOptionalString('caFile') === null
|
||||
)
|
||||
) {
|
||||
confFile.clusters[0].cluster['certificate-authority'] =
|
||||
targetCluster.getString('caFile');
|
||||
}
|
||||
}
|
||||
|
||||
const confString = dumpYaml(confFile);
|
||||
const confFilePath = resolveSafeChildPath(ctx.workspacePath, 'config');
|
||||
fs.writeFileSync(confFilePath, confString, {
|
||||
encoding: 'utf8',
|
||||
mode: '600',
|
||||
});
|
||||
await executeShellCommand({
|
||||
command: 'cat',
|
||||
args: [confFilePath],
|
||||
logStream: ctx.logStream,
|
||||
});
|
||||
await executeShellCommand({
|
||||
command: 'cat',
|
||||
args: [manifestPath],
|
||||
logStream: ctx.logStream,
|
||||
});
|
||||
let counter = 1;
|
||||
for (const obj of objList) {
|
||||
let manifestFilePath = resolveSafeChildPath(
|
||||
ctx.workspacePath,
|
||||
'to-be-applied-' + counter.toString() + '.yaml',
|
||||
);
|
||||
fs.writeFileSync(manifestFilePath, yaml.dump(obj), {
|
||||
encoding: 'utf8',
|
||||
mode: '600',
|
||||
});
|
||||
if (obj.metadata.generateName !== undefined) {
|
||||
await executeShellCommand({
|
||||
command: 'kubectl',
|
||||
args: [
|
||||
'--kubeconfig',
|
||||
confFilePath,
|
||||
'create',
|
||||
'-f',
|
||||
manifestFilePath,
|
||||
],
|
||||
logStream: ctx.logStream,
|
||||
});
|
||||
} else {
|
||||
await executeShellCommand({
|
||||
command: 'kubectl',
|
||||
args: [
|
||||
'--kubeconfig',
|
||||
confFilePath,
|
||||
'apply',
|
||||
'-f',
|
||||
manifestFilePath,
|
||||
],
|
||||
logStream: ctx.logStream,
|
||||
});
|
||||
}
|
||||
counter += 1;
|
||||
}
|
||||
return;
|
||||
}
|
||||
throw new Error('please specify a valid cluster name');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
// Finds the first cluster that matches the given name.
|
||||
function getClusterConfig(name: string, config: Config): Config {
|
||||
const clusterConfigs = config
|
||||
.getConfigArray('kubernetes.clusterLocatorMethods')
|
||||
.filter((val: Config) => {
|
||||
return val.getString('type') === 'config';
|
||||
});
|
||||
|
||||
const clusters = new Array<Config>();
|
||||
clusterConfigs.filter((conf: Config) => {
|
||||
const cluster = conf.getConfigArray('clusters').find((val: Config) => {
|
||||
return val.getString('name') === name;
|
||||
});
|
||||
if (cluster) {
|
||||
clusters.push(cluster);
|
||||
}
|
||||
});
|
||||
|
||||
if (clusters.length === 0) {
|
||||
throw new Error(`Cluster with name ${name} not found`);
|
||||
}
|
||||
return clusters[0];
|
||||
}
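Putting the pieces of the handler above together, the kubeconfig it writes to the workspace before shelling out to `kubectl` has the standard single-context shape; a small sketch that builds the same structure and prints it (all values are placeholders):

```ts
import { dumpYaml } from '@kubernetes/client-node';

// Sketch of the kubeconfig cnoe:kubernetes:apply writes. Values are placeholders.
const conf = {
  apiVersion: 'v1',
  kind: 'Config',
  'current-context': 'my-cluster',
  contexts: [
    { name: 'my-cluster', context: { cluster: 'my-cluster', user: 'my-cluster' } },
  ],
  clusters: [
    {
      name: 'my-cluster',
      cluster: { server: 'https://127.0.0.1:6443', 'insecure-skip-tls-verify': true },
    },
  ],
  users: [{ name: 'my-cluster', user: { token: 'REDACTED' } }],
};

console.log(dumpYaml(conf));
```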
|
packages/backend/src/plugins/kubernetes-apply.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
import { createTemplateAction } from '@backstage/plugin-scaffolder-node';
|
||||
import { KubeConfig, CustomObjectsApi } from '@kubernetes/client-node';
|
||||
import YAML from 'yaml';
|
||||
import { Config } from '@backstage/config';
|
||||
import { resolveSafeChildPath } from '@backstage/backend-common';
|
||||
import fs from 'fs-extra';
|
||||
|
||||
export const kubernetesApply = (config: Config) => {
|
||||
return createTemplateAction<{
|
||||
manifestString?: string;
|
||||
manifestObject?: any;
|
||||
manifestPath?: string;
|
||||
namespaced: boolean;
|
||||
clusterName: string;
|
||||
}>({
|
||||
id: 'cnoe:kubernetes:apply',
|
||||
schema: {
|
||||
input: {
|
||||
type: 'object',
|
||||
required: ['namespaced'],
|
||||
properties: {
|
||||
manifestString: {
|
||||
type: 'string',
|
||||
title: 'Manifest',
|
||||
description: 'The manifest to apply in the cluster',
|
||||
},
|
||||
manifestObject: {
|
||||
type: 'object',
|
||||
title: 'Manifest',
|
||||
description: 'The manifest to apply in the cluster',
|
||||
},
|
||||
manifestPath: {
|
||||
type: 'string',
|
||||
title: 'Path to the manifest file',
|
||||
description: 'The path to the manifest file',
|
||||
},
|
||||
namespaced: {
|
||||
type: 'boolean',
|
||||
title: 'Namespaced',
|
||||
description: 'Whether the API is namespaced or not',
|
||||
},
|
||||
clusterName: {
|
||||
type: 'string',
|
||||
title: 'Cluster Name',
|
||||
description: 'The name of the cluster to apply this',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
async handler(ctx) {
|
||||
let obj: any;
|
||||
if (ctx.input.manifestString) {
|
||||
obj = YAML.parse(ctx.input.manifestString);
|
||||
} else if (ctx.input.manifestObject) {
|
||||
obj = ctx.input.manifestObject;
|
||||
} else {
|
||||
const filePath = resolveSafeChildPath(
|
||||
ctx.workspacePath,
|
||||
ctx.input.manifestPath!,
|
||||
);
|
||||
const fileContent = fs.readFileSync(filePath, 'utf8');
|
||||
obj = YAML.parse(fileContent);
|
||||
}
|
||||
const words = obj.apiVersion.split('/');
|
||||
const group = words[0];
|
||||
const version = words[1];
|
||||
// hack. needs fixing to correctly extract the plurals
|
||||
const plural = `${obj.kind.toLowerCase()}s`;
|
||||
const targetCluster = getClusterConfig(ctx.input.clusterName!, config);
|
||||
// hack. needs fixing to get the KubeConfig info from app-config.yaml
|
||||
const kc = new KubeConfig();
|
||||
kc.addCluster({
|
||||
name: targetCluster.getString('name'),
|
||||
caData: targetCluster.getString('caData'),
|
||||
server: targetCluster.getString('url'),
|
||||
skipTLSVerify: targetCluster.getBoolean('skipTLSVerify'),
|
||||
});
|
||||
kc.addUser({
|
||||
name: 'scaffolder-user',
|
||||
token: targetCluster.getString('serviceAccountToken'),
|
||||
});
|
||||
kc.addContext({
|
||||
cluster: ctx.input.clusterName,
|
||||
user: 'scaffolder-user',
|
||||
name: ctx.input.clusterName,
|
||||
});
|
||||
kc.setCurrentContext(ctx.input.clusterName);
|
||||
|
||||
const client = kc.makeApiClient(CustomObjectsApi);
|
||||
// Server-side apply.
|
||||
if (ctx.input.namespaced) {
|
||||
await client
|
||||
.patchNamespacedCustomObject(
|
||||
group,
|
||||
version,
|
||||
obj.metadata.namespace,
|
||||
plural,
|
||||
obj.metadata.name,
|
||||
obj,
|
||||
undefined,
|
||||
'backstage',
|
||||
true,
|
||||
{ headers: { 'Content-Type': 'application/apply-patch+yaml' } },
|
||||
)
|
||||
.then(
|
||||
resp => {
|
||||
ctx.logger.info(
|
||||
`Successfully created ${obj.metadata.namespace}/${obj.metadata.name} Application: HTTP ${resp.response.statusCode}`,
|
||||
);
|
||||
},
|
||||
err => {
|
||||
ctx.logger.error(
|
||||
`Failed to make PATCH call for ${obj.metadata.namespace}/${
|
||||
obj.metadata.name
|
||||
} Application: Body ${JSON.stringify(
|
||||
err.body,
|
||||
null,
|
||||
2,
|
||||
)} Response ${JSON.stringify(err.response, null, 2)}.`,
|
||||
);
|
||||
throw err;
|
||||
},
|
||||
);
|
||||
return;
|
||||
}
|
||||
await client
|
||||
.patchClusterCustomObject(
|
||||
group,
|
||||
version,
|
||||
plural,
|
||||
obj.metadata.name,
|
||||
obj,
|
||||
undefined,
|
||||
'backstage',
|
||||
true,
|
||||
{ headers: { 'Content-Type': 'application/apply-patch+yaml' } },
|
||||
)
|
||||
.then(
|
||||
resp => {
|
||||
ctx.logger.info(
|
||||
`Successfully created ${obj.metadata.name} Application: HTTP ${resp.response.statusCode}`,
|
||||
);
|
||||
},
|
||||
err => {
|
||||
ctx.logger.error(
|
||||
`Failed to make PATCH call for ${
|
||||
obj.metadata.name
|
||||
} Application: Body ${JSON.stringify(
|
||||
err.body,
|
||||
null,
|
||||
2,
|
||||
)} Response ${JSON.stringify(err.response, null, 2)}.`,
|
||||
);
|
||||
throw err;
|
||||
},
|
||||
);
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
function getClusterConfig(name: string, config: Config): Config {
|
||||
const clusterConfigs = config
|
||||
.getConfigArray('kubernetes.clusterLocatorMethods')
|
||||
.filter((val: Config) => {
|
||||
return val.getString('type') === 'config';
|
||||
});
|
||||
|
||||
const clusters = new Array<Config>();
|
||||
clusterConfigs.filter((conf: Config) => {
|
||||
const cluster = conf.getConfigArray('clusters').find((val: Config) => {
|
||||
return val.getString('name') === name;
|
||||
});
|
||||
if (cluster) {
|
||||
clusters.push(cluster);
|
||||
}
|
||||
});
|
||||
|
||||
if (clusters.length === 0) {
|
||||
throw new Error(`Cluster with name ${name} not found`);
|
||||
}
|
||||
return clusters[0];
|
||||
}
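The `${obj.kind.toLowerCase()}s` pluralization flagged as a hack above breaks for kinds such as `Ingress` ("ingresses") or `NetworkPolicy` ("networkpolicies"). A sketch of a slightly safer mapping, a lookup table for known irregular kinds with a naive fallback; illustrative only, not what the repository currently ships:

```ts
const IRREGULAR_PLURALS: Record<string, string> = {
  Ingress: 'ingresses',
  NetworkPolicy: 'networkpolicies',
  Endpoints: 'endpoints',
  PodSecurityPolicy: 'podsecuritypolicies',
};

export function pluralForKind(kind: string): string {
  const irregular = IRREGULAR_PLURALS[kind];
  if (irregular) {
    return irregular;
  }
  if (kind.endsWith('s')) {
    return `${kind.toLowerCase()}es`;
  }
  return `${kind.toLowerCase()}s`;
}

// pluralForKind('Application') === 'applications'
// pluralForKind('Ingress') === 'ingresses'
```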
|
packages/backend/src/plugins/kubernetes.ts (new file, 18 lines)
@@ -0,0 +1,18 @@

import { KubernetesBuilder } from '@backstage/plugin-kubernetes-backend';
import { Router } from 'express';
import { PluginEnvironment } from '../types';
import { CatalogClient } from '@backstage/catalog-client';

export default async function createPlugin(
  env: PluginEnvironment,
): Promise<Router> {
  const catalogApi = new CatalogClient({ discoveryApi: env.discovery });
  const { router } = await KubernetesBuilder.createBuilder({
    logger: env.logger,
    config: env.config,
    catalogApi,
    permissions: env.permissions,
  }).build();

  return router;
}
|
@@ -1,68 +0,0 @@
import { createTemplateAction } from '@backstage/plugin-scaffolder-node';
import yaml from 'js-yaml';

// Add type annotations to fix TS2742
type SanitizeResourceInput = {
  document: string;
};

type SanitizeResourceOutput = {
  sanitized: string;
};

export const createSanitizeResource = () => {
  return createTemplateAction<SanitizeResourceInput, SanitizeResourceOutput>({
    id: 'cnoe:utils:sanitize',
    schema: {
      input: {
        type: 'object',
        required: ['document'],
        properties: {
          document: {
            type: 'string',
            title: 'Document',
            description: 'The document to be sanitized',
          },
        },
      },
      output: {
        type: 'object',
        properties: {
          sanitized: {
            type: 'string',
            description: 'The sanitized yaml string',
          },
        },
      },
    },
    async handler(ctx) {
      const obj = yaml.load(ctx.input.document);
      ctx.output('sanitized', yaml.dump(removeEmptyObjects(obj)));
    },
  });
};

// Remove empty elements from an object
function removeEmptyObjects(obj: any): any {
  if (typeof obj !== 'object' || obj === null) {
    return obj;
  }

  const newObj: any = Array.isArray(obj) ? [] : {};

  for (const key in obj) {
    const value = obj[key];
    const newValue = removeEmptyObjects(value);
    if (
      !(
        newValue === null ||
        newValue === undefined ||
        (typeof newValue === 'object' && Object.keys(newValue).length === 0)
      )
    ) {
      newObj[key] = newValue;
    }
  }

  return newObj;
}
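For reference, a scaffolder template step exercising this `cnoe:utils:sanitize` action might look like the sketch below. The step ids and the source of the document are illustrative; only the action id, the `document` input, and the `sanitized` output come from the action definition above.

```yaml
# Hypothetical template step; 'fetch-manifest' is a made-up earlier step.
steps:
  - id: sanitize-manifest
    name: Sanitize manifest
    action: cnoe:utils:sanitize
    input:
      document: ${{ steps['fetch-manifest'].output.manifest }}
# The cleaned YAML is then available as:
#   ${{ steps['sanitize-manifest'].output.sanitized }}
```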
@@ -1,44 +1,72 @@
-import { ScmIntegrations } from '@backstage/integration';
-import { createPublishGiteaAction } from './gitea-actions';
-
-import {
-  coreServices,
-  createBackendModule,
-} from '@backstage/backend-plugin-api';
-import { scaffolderActionsExtensionPoint } from '@backstage/plugin-scaffolder-node/alpha';
-import { createArgoCDApp } from './argocd';
-import { getRootLogger } from '@backstage/backend-common';
-import { createKubernetesApply } from './k8s-apply';
-import { createSanitizeResource } from './sanitize';
-import { createVerifyDependency } from './verify';
-
-export const cnoeScaffolderActions = createBackendModule({
-  pluginId: 'scaffolder',
-  moduleId: 'cnoe-actions',
-  register(env) {
-    env.registerInit({
-      deps: {
-        scaffolder: scaffolderActionsExtensionPoint,
-        config: coreServices.rootConfig,
-      },
-      async init({ scaffolder, config }) {
-        const integrations = ScmIntegrations.fromConfig(config);
-        const logger = getRootLogger();
-
-        scaffolder.addActions(
-          createPublishGiteaAction({
-            integrations,
-            config,
-          }),
-          createArgoCDApp({
-            config,
-            logger,
-          }),
-          createKubernetesApply(config),
-          createSanitizeResource(),
-          createVerifyDependency(),
-        );
-      },
-    });
-  },
-});
+import { CatalogClient } from '@backstage/catalog-client';
+import {
+  createBuiltinActions,
+  createRouter,
+} from '@backstage/plugin-scaffolder-backend';
+import { Router } from 'express';
+import type { PluginEnvironment } from '../types';
+import { ScmIntegrations } from '@backstage/integration';
+import { createInvokeArgoAction } from './workflow-argo';
+import {
+  createZipAction,
+  createSleepAction,
+  createWriteFileAction,
+  createAppendFileAction,
+  createMergeJSONAction,
+  createMergeAction,
+  createParseFileAction,
+  createSerializeYamlAction,
+  createSerializeJsonAction,
+  createJSONataAction,
+  createYamlJSONataTransformAction,
+  createJsonJSONataTransformAction,
+} from '@roadiehq/scaffolder-backend-module-utils';
+import { kubernetesApply } from './kubernetes-apply';
+
+export default async function createPlugin(
+  env: PluginEnvironment,
+): Promise<Router> {
+  const catalogClient = new CatalogClient({
+    discoveryApi: env.discovery,
+  });
+  const integrations = ScmIntegrations.fromConfig(env.config);
+
+  const builtInActions = createBuiltinActions({
+    integrations,
+    catalogClient,
+    config: env.config,
+    reader: env.reader,
+  });
+
+  const scaffolderBackendModuleUtils = [
+    createZipAction(),
+    createSleepAction(),
+    createWriteFileAction(),
+    createAppendFileAction(),
+    createMergeJSONAction({}),
+    createMergeAction(),
+    createParseFileAction(),
+    createSerializeYamlAction(),
+    createSerializeJsonAction(),
+    createJSONataAction(),
+    createYamlJSONataTransformAction(),
+    createJsonJSONataTransformAction(),
+  ];
+
+  const actions = [
+    ...builtInActions,
+    ...scaffolderBackendModuleUtils,
+    createInvokeArgoAction(env.config, env.logger),
+    kubernetesApply(env.config),
+  ];
+
+  return await createRouter({
+    actions: actions,
+    logger: env.logger,
+    config: env.config,
+    database: env.database,
+    reader: env.reader,
+    catalogClient: catalogClient,
+    identity: env.identity,
+  });
+}
packages/backend/src/plugins/search.ts (new file, 66 lines)

@@ -0,0 +1,66 @@
import { useHotCleanup } from '@backstage/backend-common';
import { createRouter } from '@backstage/plugin-search-backend';
import {
  IndexBuilder,
  LunrSearchEngine,
} from '@backstage/plugin-search-backend-node';
import { PluginEnvironment } from '../types';
import { DefaultCatalogCollatorFactory } from '@backstage/plugin-catalog-backend';
import { DefaultTechDocsCollatorFactory } from '@backstage/plugin-techdocs-backend';
import { Router } from 'express';

export default async function createPlugin(
  env: PluginEnvironment,
): Promise<Router> {
  // Initialize a connection to a search engine.
  const searchEngine = new LunrSearchEngine({
    logger: env.logger,
  });
  const indexBuilder = new IndexBuilder({
    logger: env.logger,
    searchEngine,
  });

  const schedule = env.scheduler.createScheduledTaskRunner({
    frequency: { minutes: 10 },
    timeout: { minutes: 15 },
    // A 3 second delay gives the backend server a chance to initialize before
    // any collators are executed, which may attempt requests against the API.
    initialDelay: { seconds: 3 },
  });

  // Collators are responsible for gathering documents known to plugins. This
  // collator gathers entities from the software catalog.
  indexBuilder.addCollator({
    schedule,
    factory: DefaultCatalogCollatorFactory.fromConfig(env.config, {
      discovery: env.discovery,
      tokenManager: env.tokenManager,
    }),
  });

  // collator gathers entities from techdocs.
  indexBuilder.addCollator({
    schedule,
    factory: DefaultTechDocsCollatorFactory.fromConfig(env.config, {
      discovery: env.discovery,
      logger: env.logger,
      tokenManager: env.tokenManager,
    }),
  });

  // The scheduler controls when documents are gathered from collators and sent
  // to the search engine for indexing.
  const { scheduler } = await indexBuilder.build();
  scheduler.start();

  useHotCleanup(module, () => scheduler.stop());

  return await createRouter({
    engine: indexBuilder.getSearchEngine(),
    types: indexBuilder.getDocumentTypes(),
    permissions: env.permissions,
    config: env.config,
    logger: env.logger,
  });
}
packages/backend/src/plugins/techdocs.ts (new file, 51 lines)

@@ -0,0 +1,51 @@
import { DockerContainerRunner } from '@backstage/backend-common';
import {
  createRouter,
  Generators,
  Preparers,
  Publisher,
} from '@backstage/plugin-techdocs-backend';
import Docker from 'dockerode';
import { Router } from 'express';
import { PluginEnvironment } from '../types';

export default async function createPlugin(
  env: PluginEnvironment,
): Promise<Router> {
  // Preparers are responsible for fetching source files for documentation.
  const preparers = await Preparers.fromConfig(env.config, {
    logger: env.logger,
    reader: env.reader,
  });

  // Docker client (conditionally) used by the generators, based on techdocs.generators config.
  const dockerClient = new Docker();
  const containerRunner = new DockerContainerRunner({ dockerClient });

  // Generators are used for generating documentation sites.
  const generators = await Generators.fromConfig(env.config, {
    logger: env.logger,
    containerRunner,
  });

  // Publisher is used for
  // 1. Publishing generated files to storage
  // 2. Fetching files from storage and passing them to TechDocs frontend.
  const publisher = await Publisher.fromConfig(env.config, {
    logger: env.logger,
    discovery: env.discovery,
  });

  // checks if the publisher is working and logs the result
  await publisher.getReadiness();

  return await createRouter({
    preparers,
    generators,
    publisher,
    logger: env.logger,
    config: env.config,
    discovery: env.discovery,
    cache: env.cache,
  });
}
@@ -1,69 +0,0 @@
import { executeShellCommand } from '@backstage/plugin-scaffolder-node';
import { createTemplateAction } from '@backstage/plugin-scaffolder-node';
import { Writable } from 'stream';

class ConsoleLogStream extends Writable {
  data: string;

  constructor(options: any) {
    super(options);
    this.data = '';
  }

  _write(chunk: any, _: any, callback: any) {
    this.data += chunk.toString(); // Convert the chunk to a string and append it to this.data
    console.log(this.data);
    callback();
  }
}

export const createVerifyDependency = () => {
  return createTemplateAction<{
    verifiers: string[];
  }>({
    id: 'cnoe:verify:dependency',
    schema: {
      input: {
        type: 'object',
        required: ['verifiers'],
        properties: {
          verifiers: {
            type: 'array',
            items: {
              type: 'string',
            },
            title: 'verifiers',
            description: 'The list of verifiers',
          },
        },
      },
    },
    async handler(ctx) {
      const verifiers = ctx.input.verifiers;

      if (verifiers === null || verifiers.length === 0) {
        ctx.logger.error('no verifier was supplied for the object');
        return;
      }

      const baseCommand = 'cnoe';
      const baseArguments = ['k8s', 'verify'];

      verifiers.forEach((verifier: string) =>
        baseArguments.push('--config', verifier),
      );

      const logStream = new ConsoleLogStream({});
      await executeShellCommand({
        command: baseCommand,
        args: baseArguments,
        logStream: logStream,
      })
        .then(() => ctx.logger.info('verification succeeded'))
        .catch(error => {
          ctx.logger.error(error);
          throw new Error(logStream.data);
        });
    },
  });
};
packages/backend/src/plugins/workflow-argo.ts (new file, 264 lines)

@@ -0,0 +1,264 @@
import {ActionContext, createTemplateAction} from "@backstage/plugin-scaffolder-node";
import { Config } from '@backstage/config';
import * as k8s from '@kubernetes/client-node';
import {Logger} from "winston";
import {HttpError} from "@kubernetes/client-node";

type argoInput = {
  namespace: string
  clusterName: string
  userOIDCToken: string
  templateName: string
  parameters: parameter[]
  wait?: boolean
}

const argoWorkflowsGroup = 'argoproj.io'
const argoWorkflowsVersion = 'v1alpha1'
const argoWorkFlowPlural = 'workflows'
const argoWorkFlowKind = 'Workflow'
const argoWorkFlowMetadataDefault: k8s.V1ObjectMeta = {
  generateName: "backstage-scaffolding-"
}

class Workflow {
  apiVersion: string = `${argoWorkflowsGroup}/${argoWorkflowsVersion}`
  kind: string = argoWorkFlowKind
  metadata: k8s.V1ObjectMeta = argoWorkFlowMetadataDefault
  spec: workflowSpec
  status?: workflowStatus
  constructor(templateName: string, namespace: string, params?: parameter[], artifacts?: object[] ) {
    this.metadata.namespace = namespace
    const args: argument = {}
    if (params) {
      args.parameters = params
    }
    if (artifacts) {
      args.artifacts = artifacts
    }
    this.spec = {
      workflowTemplateRef: {
        name: templateName
      },
      arguments: args
    }
  }
}

type workflowSpec = {
  arguments?: argument
  entrypoint?: string
  workflowTemplateRef: workflowTemplateRef
}

type workflowStatus = {
  conditions?: workflowStatusCondition[]
  phase?: string
  progress?: string
}

type workflowStatusCondition = {
  message?: string
  status?: string
  type: string
}

type workflowTemplateRef = {
  clusterScope?: boolean
  name: string
}

type argument = {
  artifacts?: object[]
  parameters?: parameter[]
}

type parameter = {
  name: string
  value: string
  valueFrom?: object
}

export function createInvokeArgoAction(config: Config, logger: Logger) {
  return createTemplateAction<argoInput>({
    id: 'workflows:argo:invoke',
    description:
      'Invokes an Argo workflow using a workflow template',
    schema: {
      input: {
        type: 'object',
        required: ['namespace', 'clusterName', 'templateName'],
        properties: {
          namespace: {
            title: 'Namespace',
            description: 'Namespace to run this workflow',
            type: 'string',
          },
          clusterName: {
            title: 'Cluster name',
            description: 'Name of Cluster',
            type: 'string',
          },
          userOIDCToken: {
            title: 'User\'s OIDC token',
            description: "If specified, it will use the provided token to communicate with the Kubernetes cluster",
            type: 'string'
          },
          templateName: {
            title: 'Template name',
            description: 'Argo Workflows template name to run',
            type: 'string',
          },
          parameters: {
            title: "Argo workflows parameters",
            description: 'parameters used by the template',
            type: 'array',
            items: {
              type: "object",
              properties: {
                name: {
                  type: "string"
                },
                value: {
                  type: "string"
                }
              }
            }
          },
          wait: {
            title: 'Wait for completion',
            description: 'specify weather to wait for completion of this workflow.',
            type: 'boolean',
          }
        },
      },
      output: {
        type: 'object',
        properties: {
          workflowName: {
            title: 'Workflow name',
            type: 'string',
          },
          workflowNamespace: {
            title: 'Workflow namespace',
            type: 'string',
          },
        },
      },
    },
    async handler(ctx: ActionContext<argoInput>) {
      logger.debug(`Invoked with ${JSON.stringify(ctx.input)})`)
      logger.info(JSON.stringify(ctx.secrets))
      const targetCluster = getClusterConfig(ctx.input.clusterName, config)
      const kc = new k8s.KubeConfig()
      kc.addCluster({
        name: targetCluster.getString("name"),
        caData: targetCluster.getString("caData"),
        server: targetCluster.getString("url"),
        skipTLSVerify: targetCluster.getBoolean("skipTLSVerify"),
      })

      kc.addUser({
        name: "scaffolder-user",
        token: ctx.input.userOIDCToken? ctx.input.userOIDCToken : targetCluster.getString("serviceAccountToken")
      })
      kc.addContext({
        cluster: ctx.input.clusterName,
        user: "scaffolder-user",
        name: ctx.input.clusterName
      })
      kc.setCurrentContext(ctx.input.clusterName)

      const client = kc.makeApiClient(k8s.CustomObjectsApi)
      const wf = new Workflow(ctx.input.templateName, ctx.input.namespace, ctx.input.parameters)
      // const body = generateBody(ctx.input.templateName, ctx.input.namespace)
      try {
        const resp = await client.createNamespacedCustomObject(
          argoWorkflowsGroup, argoWorkflowsVersion, ctx.input.namespace,
          argoWorkFlowPlural, wf
        )
        const respBody = resp.body as Workflow
        logger.debug(`Workflow ID: ${respBody.metadata.name}, namespace ${respBody.metadata.namespace}`)
        ctx.output('workflowName', respBody.metadata.name!)
        ctx.output('workflowNamespace', respBody.metadata.namespace!)
        if (ctx.input.wait) {
          await wait(kc, respBody.metadata.namespace!, respBody.metadata.name!)
        }
      } catch (err) {
        if (err instanceof HttpError) {
          let msg = `${err.response.statusMessage}: `
          if ("kind" in err.body && err.body.kind === "Status" && "message" in err.body) {
            msg += err.body.message
          }
          logger.info(`error : ${err.response.statusCode} ${msg}`)
          throw new Error(`Failed to talk to the cluster: ${err.response.statusCode} ${err.response.statusMessage} \n ${msg}`)
        }
        if (err instanceof Error) {
          logger.error(`error while talking to cluster: ${err.name} ${err.message}`)
        }
        throw new Error("Unknown exception was encountered.")
      }
    }
  }
  )
}

function getClusterConfig(name: string, config: Config): Config {

  const clusterConfigs = config.getConfigArray("kubernetes.clusterLocatorMethods").filter(
    (val: Config) => {
      return val.getString('type') === 'config'
    }
  )

  const clusters = new Array<Config>();
  clusterConfigs.filter( (conf: Config) => {
    const cluster = conf.getConfigArray("clusters").find( (val: Config) => {
      return val.getString("name") === name
    })
    if (cluster) {
      clusters.push(cluster)
    }
  })

  if (clusters.length === 0 ) {
    throw new Error(`Cluster with name ${name} not found`)
  }
  return clusters[0]
}

async function wait(kc: k8s.KubeConfig, namespace: string, name: string, timeoutSeconds: number = 120) {
  const client = new k8s.Watch(kc)
  return new Promise<void>( async (resolve, reject) => {
    const result = await client.watch(
      `/apis/${argoWorkflowsGroup}/${argoWorkflowsVersion}/namespaces/${namespace}/${argoWorkFlowPlural}`,
      {
        fieldSelector: `metadata.name=${name}`,
      },
      (_type, apiObj, _watchObj) => {
        if (apiObj) {
          const wf = apiObj as Workflow
          if (wf.status && wf.status.conditions) {
            const cond = wf.status.conditions.filter((val) => {
              return val.type === 'Completed' && val.status === "True"
            })
            if (cond.length > 0) {
              // result.abort()
              resolve()
              return
            }
          }
        }
      },
      (err) => {
        if (err instanceof Error) {
          // logger.debug(`error encountered while waiting for workflow to complete: ${err.name} ${err.message}`)
        }
      }
    )
    setTimeout(() => {
      result.abort()
      reject(new Error("TIMEOUT"))
    }, timeoutSeconds * 1000)
  })
}
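Put together, the manifest this action submits through `createNamespacedCustomObject` is equivalent to the following sketch. The `generateName` prefix and `workflowTemplateRef` shape come from the `Workflow` class above; the template name, namespace, and parameter values here are illustrative (they mirror the test-template.yaml later in this change set):

```yaml
# Roughly what the action POSTs to the cluster; values are examples.
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: backstage-scaffolding-
  namespace: admin
spec:
  workflowTemplateRef:
    name: workflow-template-whalesay-template
  arguments:
    parameters:
      - name: message
        value: hello
```

With `wait: true`, the action then watches the created Workflow until a `Completed=True` condition appears or the 120-second timeout fires.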
@@ -5,8 +5,9 @@ import {
  PluginDatabaseManager,
  PluginEndpointDiscovery,
  TokenManager,
-} from '@backstage/backend-common/dist'; //TODO: deprecated
-import { PluginTaskScheduler } from '@backstage/backend-tasks/dist';
+  UrlReader,
+} from '@backstage/backend-common';
+import { PluginTaskScheduler } from '@backstage/backend-tasks';
import { PermissionEvaluator } from '@backstage/plugin-permission-common';
import { IdentityApi } from '@backstage/plugin-auth-node';

@@ -15,6 +16,7 @@ export type PluginEnvironment = {
  database: PluginDatabaseManager;
  cache: PluginCacheManager;
  config: Config;
+  reader: UrlReader;
  discovery: PluginEndpointDiscovery;
  tokenManager: TokenManager;
  scheduler: PluginTaskScheduler;
@ -1,60 +0,0 @@
|
|||
/*
|
||||
* Copyright 2023 The Backstage Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { defineConfig } from '@playwright/test';
|
||||
import { generateProjects } from '@backstage/e2e-test-utils/playwright';
|
||||
|
||||
/**
|
||||
* See https://playwright.dev/docs/test-configuration.
|
||||
*/
|
||||
export default defineConfig({
|
||||
timeout: 60_000,
|
||||
|
||||
expect: {
|
||||
timeout: 5_000,
|
||||
},
|
||||
|
||||
// Run your local dev server before starting the tests
|
||||
webServer: process.env.CI
|
||||
? []
|
||||
: [
|
||||
{
|
||||
command: 'yarn start',
|
||||
port: 3000,
|
||||
reuseExistingServer: true,
|
||||
timeout: 60_000,
|
||||
},
|
||||
],
|
||||
|
||||
forbidOnly: !!process.env.CI,
|
||||
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
|
||||
reporter: [['html', { open: 'never', outputFolder: 'e2e-test-report' }]],
|
||||
|
||||
use: {
|
||||
actionTimeout: 0,
|
||||
baseURL:
|
||||
process.env.PLAYWRIGHT_URL ??
|
||||
(process.env.CI ? 'http://localhost:7007' : 'http://localhost:3000'),
|
||||
screenshot: 'only-on-failure',
|
||||
trace: 'on-first-retry',
|
||||
},
|
||||
|
||||
outputDir: 'node_modules/.cache/e2e-test-results',
|
||||
|
||||
projects: generateProjects(), // Find all packages with e2e-test folders
|
||||
});
|
|
@@ -4,6 +4,6 @@ This is where your own plugins and their associated modules live, each in a
separate folder of its own.

If you want to create a new plugin here, go to your project root directory, run
-the command `yarn new`, and follow the on-screen instructions.
+the command `yarn backstage-cli create`, and follow the on-screen instructions.

You can also check out existing plugins on [the plugin marketplace](https://backstage.io/plugins)!
plugins/workflows/.eslintrc.js (new file, 1 line)

@@ -0,0 +1 @@
module.exports = require('@backstage/cli/config/eslint-factory')(__dirname);
plugins/workflows/README.md (new file, 13 lines)

@@ -0,0 +1,13 @@
# workflows

Welcome to the workflows plugin!

_This plugin was created through the Backstage CLI_

## Getting started

Your plugin has been added to the example app in this repository, meaning you'll be able to access it by running `yarn start` in the root directory, and then navigating to [/workflows](http://localhost:3000/workflows).

You can also serve the plugin in isolation by running `yarn start` in the plugin directory.
This method of serving the plugin provides quicker iteration speed and a faster startup and hot reloads.
It is only meant for local development, and the setup for it can be found inside the [/dev](./dev) directory.
plugins/workflows/package.json (new file, 53 lines)

@@ -0,0 +1,53 @@
{
  "name": "@internal/plugin-workflows",
  "version": "0.1.0",
  "main": "src/index.ts",
  "types": "src/index.ts",
  "license": "Apache-2.0",
  "private": true,
  "publishConfig": {
    "access": "public",
    "main": "dist/index.esm.js",
    "types": "dist/index.d.ts"
  },
  "backstage": {
    "role": "frontend-plugin"
  },
  "scripts": {
    "start": "backstage-cli package start",
    "build": "backstage-cli package build",
    "lint": "backstage-cli package lint",
    "test": "backstage-cli package test",
    "clean": "backstage-cli package clean",
    "prepack": "backstage-cli package prepack",
    "postpack": "backstage-cli package postpack"
  },
  "dependencies": {
    "@backstage/core-components": "^0.13.1",
    "@backstage/core-plugin-api": "^1.5.1",
    "@backstage/plugin-catalog-react": "^1.6.0",
    "@backstage/theme": "^0.3.0",
    "@material-ui/core": "^4.12.2",
    "@material-ui/icons": "^4.9.1",
    "@material-ui/lab": "4.0.0-alpha.57",
    "react-use": "^17.2.4"
  },
  "peerDependencies": {
    "react": "^16.13.1 || ^17.0.0"
  },
  "devDependencies": {
    "@backstage/cli": "^0.22.7",
    "@backstage/core-app-api": "^1.8.0",
    "@backstage/dev-utils": "^1.0.15",
    "@backstage/test-utils": "^1.3.1",
    "@testing-library/jest-dom": "^5.10.1",
    "@testing-library/react": "^12.1.3",
    "@testing-library/user-event": "^14.0.0",
    "@types/node": "*",
    "cross-fetch": "^3.1.5",
    "msw": "^0.49.0"
  },
  "files": [
    "dist"
  ]
}
@@ -0,0 +1,26 @@
// import React from 'react';
// import { rest } from 'msw';
// import { setupServer } from 'msw/node';
// import { screen } from '@testing-library/react';
// import {
//   setupRequestMockHandlers,
//   renderInTestApp,
// } from "@backstage/test-utils";

// describe('ExampleComponent', () => {
//   const server = setupServer();
//   // Enable sane handlers for network requests
//   setupRequestMockHandlers(server);
//
//   // setup mock response
//   beforeEach(() => {
//     server.use(
//       rest.get('/*', (_, res, ctx) => res(ctx.status(200), ctx.json({}))),
//     );
//   });
//
//   it('should render', async () => {
//     await renderInTestApp(<ExampleComponent />);
//     expect(screen.getByText('Welcome to workflows!')).toBeInTheDocument();
//   });
// });
@@ -0,0 +1,39 @@
import React from 'react';
import {Grid} from '@material-ui/core';
import {
  Page,
  Content,
} from '@backstage/core-components';
import {FetchTFState, ManageBlueprint} from "./FetchTFState";

export const BlueprintsComponent = () => (
  <Page themeId="tool">
    <Content>
      {/* <ContentHeader title="Blueprint information">*/}
      {/*   <SupportButton>A description of your plugin goes here.</SupportButton>*/}
      {/* </ContentHeader>*/}
      <Grid container spacing={3} direction="column">
        <Grid item>
          <ManageBlueprint />
          {/* <InfoCard title="Blueprint management">*/}
          {/*   <Typography color="textSecondary">*/}
          {/*     Manage this blueprint deployment*/}
          {/*   </Typography>*/}
          {/*   <IconButton aria-label="delete" size="medium">*/}
          {/*     <DeleteIcon />*/}
          {/*   </IconButton>*/}
          {/*   <IconButton aria-label="clear" size="medium">*/}
          {/*     <ClearIcon />*/}
          {/*   </IconButton>*/}
          {/*   <IconButton aria-label="link" size="medium">*/}
          {/*     <LinkOffRounded />*/}
          {/*   </IconButton>*/}
          {/* </InfoCard>*/}
        </Grid>
        <Grid item>
          <FetchTFState />
        </Grid>
      </Grid>
    </Content>
  </Page>
);
@ -0,0 +1,353 @@
|
|||
import React, {useState} from 'react';
|
||||
import {Table, TableColumn, Progress, InfoCard, LinkButton} from '@backstage/core-components';
|
||||
import Alert from '@material-ui/lab/Alert';
|
||||
import useAsync from 'react-use/lib/useAsync';
|
||||
|
||||
import {
|
||||
DiscoveryApi,
|
||||
discoveryApiRef, OpenIdConnectApi,
|
||||
useApi
|
||||
} from '@backstage/core-plugin-api';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import {gunzipSync} from "zlib";
|
||||
import {useEntity} from "@backstage/plugin-catalog-react"
|
||||
import {
|
||||
Dialog, DialogActions,
|
||||
DialogContent,
|
||||
DialogContentText,
|
||||
DialogTitle,
|
||||
IconButton,
|
||||
Typography
|
||||
} from "@material-ui/core";
|
||||
import DeleteIcon from "@material-ui/icons/Delete";
|
||||
import ClearIcon from "@material-ui/icons/Clear";
|
||||
import LinkOffRounded from "@material-ui/icons/LinkOffRounded";
|
||||
import {keycloakOIDCAuthApiRef} from "../../plugin";
|
||||
|
||||
type TFState = {
|
||||
terraform_version?: string
|
||||
resources: {
|
||||
name: string
|
||||
provider: string
|
||||
type: string
|
||||
instances: {
|
||||
attributes: {
|
||||
arn: string
|
||||
id: string
|
||||
}
|
||||
}[]
|
||||
}[]
|
||||
}
|
||||
|
||||
type Resource = {
|
||||
name: string
|
||||
provider: string
|
||||
type: string
|
||||
arn?: string
|
||||
id?: string
|
||||
}
|
||||
|
||||
type TFTableProps = {
|
||||
resources: Resource[]
|
||||
}
|
||||
|
||||
export const TFTable = (props: TFTableProps) => {
|
||||
|
||||
const columns: TableColumn[] = [
|
||||
{ title: 'Name', field: 'name' },
|
||||
{ title: 'Provider', field: 'provider' },
|
||||
{ title: 'Type', field: 'type' },
|
||||
{ title: 'Arn', field: 'arn' },
|
||||
{ title: 'ID', field: 'id' },
|
||||
];
|
||||
|
||||
return (
|
||||
<Table
|
||||
title="Resources provisioned by Terraform"
|
||||
options={{ search: true, paging: false }}
|
||||
columns={columns}
|
||||
data={props.resources}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export const FetchTFState = () => {
|
||||
const apiRef = useApi(discoveryApiRef)
|
||||
const entity = useEntity()
|
||||
const oidcApi = useApi(keycloakOIDCAuthApiRef)
|
||||
const secretName = `tfstate-default-${entity.entity.metadata.name}`
|
||||
const { value, loading, error } = useAsync((): Promise<TFState> => {
|
||||
return getTFState(secretName, "admin", apiRef, oidcApi)
|
||||
})
|
||||
if (loading) {
|
||||
return <Progress />
|
||||
} else if (error) {
|
||||
return <Alert severity="error">{error}</Alert>;
|
||||
}
|
||||
|
||||
// const a = await getTFState("tfstate-default-helloworld", "flux-system", discoveryApi)
|
||||
// const tfdata = tfstate as TFState
|
||||
const resources = value!.resources.map(val => {
|
||||
const out: Resource = {
|
||||
name: val.name,
|
||||
provider: val.provider,
|
||||
type: val.type,
|
||||
}
|
||||
if (val.instances.length > 0) {
|
||||
out.arn = val.instances[0].attributes.arn
|
||||
out.id = val.instances[0].attributes.id
|
||||
}
|
||||
return out
|
||||
})
|
||||
|
||||
return <TFTable resources={resources}/>
|
||||
};
|
||||
|
||||
// horrible
|
||||
type payload = {
|
||||
kind: string
|
||||
apiVersion: string
|
||||
items?: {
|
||||
metadata: {
|
||||
labels: {
|
||||
[key: string]: string
|
||||
}
|
||||
}
|
||||
}[]
|
||||
metadata: {
|
||||
annotations?: {
|
||||
[key: string]: string
|
||||
}
|
||||
}
|
||||
type: string
|
||||
data: {
|
||||
tfstate: string
|
||||
}
|
||||
}
|
||||
|
||||
async function getTFState(name: string, namespace: string, apiRef: DiscoveryApi, oidcRef: OpenIdConnectApi): Promise<TFState> {
|
||||
const token = await oidcRef.getIdToken()
|
||||
const baseUrl = await apiRef.getBaseUrl("kubernetes")
|
||||
const proxyUrl = `${baseUrl}/proxy`
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const resp = await fetch(`${proxyUrl}/api/v1/namespaces/${namespace}/secrets/${name}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'X-Kubernetes-Cluster': "canoe-packaging",
|
||||
'Authorization': `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
if (resp.ok) {
|
||||
const payload = await resp.json() as payload
|
||||
const data = Buffer.from(payload.data.tfstate, 'base64')
|
||||
let compression = "gzip"
|
||||
if ( payload.metadata.annotations && "encoding" in payload.metadata.annotations) {
|
||||
compression = payload.metadata.annotations.encoding
|
||||
}
|
||||
if (compression === "gzip") {
|
||||
const a = gunzipSync(data).toString("utf-8")
|
||||
resolve(JSON.parse(a) as TFState)
|
||||
}
|
||||
reject(`unknown compression method specified: ${compression}`)
|
||||
} else {
|
||||
if (resp.status === 404) {
|
||||
resolve( {resources: []} as TFState)
|
||||
}
|
||||
reject(`Failed to retrieve terraform information: ${resp.status}: ${resp.statusText} `)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
enum workflowStatus {
|
||||
UNKNOWN,
|
||||
DELETING = 0,
|
||||
CREATING,
|
||||
DELETED,
|
||||
NOTFOUND,
|
||||
FAILED
|
||||
}
|
||||
export const ManageBlueprint = () => {
|
||||
const entity = useEntity()
|
||||
const [open, setOpen] = useState(false);
|
||||
|
||||
const handleClickOpen = () => {
|
||||
setOpen(true);
|
||||
};
|
||||
|
||||
const handleClose = () => {
|
||||
setOpen(false);
|
||||
};
|
||||
const oidcApi = useApi(keycloakOIDCAuthApiRef)
|
||||
|
||||
const apiRef = useApi(discoveryApiRef)
|
||||
const { value, loading, error } = useAsync((): Promise<workflowStatus> => {
|
||||
return getWorkflow(entity.entity.metadata.name, "admin", apiRef, oidcApi)
|
||||
})
|
||||
const module = entity.entity.metadata.annotations!["blueprint-module"]
|
||||
if (module === undefined) {
|
||||
return <Alert severity="error">"could not find blueprint module"</Alert>;
|
||||
}
|
||||
|
||||
const handleConfirm = async (): Promise<void> => {
|
||||
const ok = await createWorkflow(entity.entity.metadata.name, module, "admin", apiRef, oidcApi)
|
||||
if (ok) {
|
||||
handleClose()
|
||||
} else {
|
||||
console.log("oh no")
|
||||
}
|
||||
}
|
||||
|
||||
if (loading) {
|
||||
return <Progress />
|
||||
} else if (error) {
|
||||
return <Alert severity="error">{error}</Alert>;
|
||||
}
|
||||
let text: string
|
||||
switch (value) {
|
||||
case workflowStatus.DELETING:
|
||||
text = "This blueprint deployment is being deleted"
|
||||
break
|
||||
case workflowStatus.DELETED:
|
||||
text = "This blueprint deployment was successfully deleted"
|
||||
break
|
||||
case workflowStatus.NOTFOUND:
|
||||
text = "Manage this blueprint with the buttons below"
|
||||
break
|
||||
case workflowStatus.FAILED:
|
||||
return <Alert severity="error">"failed to delete blueprint deployment"</Alert>;
|
||||
default:
|
||||
return <Alert severity="error">"could not determine blueprint status"</Alert>;
|
||||
}
|
||||
|
||||
return (
|
||||
<InfoCard title="Blueprint management">
|
||||
<Typography color="textSecondary">
|
||||
{text}
|
||||
</Typography>
|
||||
<IconButton aria-label="delete" size="medium" onClick={handleClickOpen}>
|
||||
<DeleteIcon />
|
||||
</IconButton>
|
||||
<Dialog open={open} onClose={handleClose}>
|
||||
<DialogTitle style={{ cursor: 'move' }} id="title">
|
||||
Confirmation
|
||||
</DialogTitle>
|
||||
<DialogContent>
|
||||
<DialogContentText>
|
||||
Are you sure you want to delete this?
|
||||
</DialogContentText>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<LinkButton onClick={handleClose} to="cba">
|
||||
Cancel
|
||||
</LinkButton>
|
||||
<LinkButton onClick={handleConfirm}
|
||||
to="abc" color="primary">Delete</LinkButton>
|
||||
</DialogActions>
|
||||
</Dialog>
|
||||
<IconButton aria-label="clear" size="medium">
|
||||
<ClearIcon />
|
||||
</IconButton>
|
||||
<IconButton aria-label="link" size="medium">
|
||||
<LinkOffRounded />
|
||||
</IconButton>
|
||||
</InfoCard>
|
||||
)
|
||||
}
|
||||
async function getWorkflow(entityId: string, namespace: string, apiRef: DiscoveryApi, oidcRef: OpenIdConnectApi ): Promise<workflowStatus> {
|
||||
const token = await oidcRef.getIdToken()
|
||||
const baseUrl = await apiRef.getBaseUrl("kubernetes")
|
||||
const proxyUrl = `${baseUrl}/proxy`
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const queryParams = new URLSearchParams({
|
||||
labelSelector: `entity-id=${entityId},workflow-kind=delete`,
|
||||
limit: "1"
|
||||
}).toString()
|
||||
|
||||
const resp = await fetch(`${proxyUrl}/apis/argoproj.io/v1alpha1/namespaces/${namespace}/workflows?${queryParams}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'X-Kubernetes-Cluster': "canoe-packaging",
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
if (resp.ok) {
|
||||
const payload = await resp.json() as payload
|
||||
if (payload.items!.length > 0) {
|
||||
const labels = payload.items![0].metadata.labels
|
||||
if ("workflows.argoproj.io/phase" in labels) {
|
||||
switch (labels["workflows.argoproj.io/phase"]) {
|
||||
case "Running":
|
||||
resolve(workflowStatus.DELETING)
|
||||
break;
|
||||
case "Succeeded":
|
||||
resolve(workflowStatus.DELETED)
|
||||
break
|
||||
case "Failed":
|
||||
resolve(workflowStatus.FAILED)
|
||||
break
|
||||
default:
|
||||
reject(workflowStatus.UNKNOWN)
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
resolve(workflowStatus.NOTFOUND)
|
||||
}
|
||||
} else {
|
||||
reject(`Failed to retrieve terraform information: ${resp.status}: ${resp.statusText} `)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function createWorkflow(entityId: string, blueprintName: string, namespace: string, apiRef: DiscoveryApi, oidcRef: OpenIdConnectApi): Promise<Boolean> {
|
||||
const token = await oidcRef.getIdToken()
|
||||
const baseUrl = await apiRef.getBaseUrl("kubernetes")
|
||||
const proxyUrl = `${baseUrl}/proxy`
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const queryParams = new URLSearchParams({
|
||||
fieldValidation: "Strict",
|
||||
}).toString()
|
||||
const body = {
|
||||
"apiVersion": "argoproj.io/v1alpha1",
|
||||
"kind": "Workflow",
|
||||
"metadata": {
|
||||
"generateName": "blue-prints-delete-",
|
||||
"namespace": "admin",
|
||||
},
|
||||
"spec": {
|
||||
"arguments": {
|
||||
"parameters": [
|
||||
{
|
||||
"name": "blueprint-name",
|
||||
"value": `${blueprintName}`
|
||||
},
|
||||
{
|
||||
"name": "entityId",
|
||||
"value": `${entityId}`
|
||||
}
|
||||
]
|
||||
},
|
||||
"workflowTemplateRef": {
|
||||
"name": "blueprints-delete"
|
||||
}
|
||||
}
|
||||
}
|
||||
const resp = await fetch(`${proxyUrl}/apis/argoproj.io/v1alpha1/namespaces/${namespace}/workflows?${queryParams}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'X-Kubernetes-Cluster': "canoe-packaging",
|
||||
Authorization: `Bearer ${token}`,
|
||||
'Content-Type': "application/json"
|
||||
},
|
||||
body: JSON.stringify(body)
|
||||
});
|
||||
if (resp.ok) {
|
||||
resolve(true)
|
||||
} else {
|
||||
reject(`Failed to delete blueprints deployment: ${resp.status}: ${resp.statusText} `)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
|
|
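`getTFState` assumes the Terraform state is exposed the way a Flux/tf-controller style setup typically stores it: a Secret named `tfstate-default-<entity name>` whose `data.tfstate` field holds a base64-encoded, gzip-compressed state file, optionally annotated with `encoding`. A hypothetical example of such a Secret (names are placeholders and the payload is truncated):

```yaml
# Hypothetical Secret consumed by getTFState; not taken from this repository.
apiVersion: v1
kind: Secret
metadata:
  name: tfstate-default-my-app        # tfstate-default-<entity name>
  namespace: admin
  annotations:
    encoding: gzip                    # read by getTFState; defaults to gzip
type: Opaque
data:
  tfstate: H4sIAAAAAAAA...            # base64 of the gzip-compressed terraform.tfstate
```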
@@ -0,0 +1 @@
export { BlueprintsComponent } from './ExampleComponent';

plugins/workflows/src/components/BlueprintComponent/terraform.ts (new file, 7700 lines)
File diff suppressed because it is too large.
plugins/workflows/src/index.ts (new file, 2 lines)

@@ -0,0 +1,2 @@

export { workflowsPlugin, EntityWorkflowsContent, keycloakOIDCAuthApiRef } from './plugin';
plugins/workflows/src/plugin.test.ts (new file, 7 lines)

@@ -0,0 +1,7 @@
import { workflowsPlugin } from './plugin';

describe('workflows', () => {
  it('should export plugin', () => {
    expect(workflowsPlugin).toBeDefined();
  });
});
plugins/workflows/src/plugin.ts (new file, 31 lines)

@@ -0,0 +1,31 @@
import {
  ApiRef, BackstageIdentityApi, createApiRef,
  createPlugin,
  createRoutableExtension,
  OpenIdConnectApi,
  ProfileInfoApi, SessionApi
} from '@backstage/core-plugin-api';

import { rootCatalogWorkflowsRouteRef } from './routes';

export const workflowsPlugin = createPlugin({
  id: 'workflows',
  routes: {
    entityContent: rootCatalogWorkflowsRouteRef,
  },
});

export const EntityWorkflowsContent = workflowsPlugin.provide(
  createRoutableExtension({
    name: 'EntityWorkflowsContent',
    component: () =>
      import('./components/BlueprintComponent').then(m => m.BlueprintsComponent),
    mountPoint: rootCatalogWorkflowsRouteRef,
  }),
);

export const keycloakOIDCAuthApiRef: ApiRef<
  OpenIdConnectApi & ProfileInfoApi & BackstageIdentityApi & SessionApi
> = createApiRef({
  id: 'auth.keycloak-oidc-provider',
});
plugins/workflows/src/routes.ts (new file, 5 lines)

@@ -0,0 +1,5 @@
import { createRouteRef } from '@backstage/core-plugin-api';

export const rootCatalogWorkflowsRouteRef = createRouteRef({
  id: 'workflows',
});
plugins/workflows/src/setupTests.ts (new file, 2 lines)

@@ -0,0 +1,2 @@
import '@testing-library/jest-dom';
import 'cross-fetch/polyfill';
test-template.yaml (new file, 80 lines)

@@ -0,0 +1,80 @@
apiVersion: scaffolder.backstage.io/v1beta3
kind: Template
metadata:
  name: test-template
  title: TESTING
  description: test
spec:
  owner: backstage/techdocs-core
  type: service
  # these are the steps which are rendered in the frontend with the form input
  parameters:
    - title: Fill in some steps
      required:
        - name
        - owner
      properties:
        name:
          title: Application Name
          type: string
          description: Unique name of the component
          ui:autofocus: true
          ui:options:
            rows: 5
        owner:
          title: Owner
          type: string
          description: Owner of the component
          ui:field: OwnerPicker
          ui:options:
            catalogFilter:
              kind: Group
        labels:
          title: Labels
          type: object
          additionalProperties:
            type: string
          description: Labels to apply to the application
        namespace:
          title: Namespace
          type: string
          description: Namespace to deploy this application into. Optional. Defaults to application name.
          ui:options:
            rows: 5
        clusterName:
          title: Cluster Name
          type: string
          default: canoe-packaging
          description: Name of the cluster to run this in
    - title: Workflow params
      properties:
        workflowParams:
          title: workflow parameters
          type: array
          description: workflow parameters
          ui:autofocus: true
          items:
            type: object
            required:
              - name
              - value
            properties:
              name:
                type: string
              value:
                type: string
  steps:
    - id: flow
      name: Flow
      action: workflows:argo:invoke
      input:
        templateName: workflow-template-whalesay-template
        namespace: admin
        clusterName: ${{ parameters.clusterName }}
        parameters: ${{ parameters.workflowParams }}

#  output:
#    links:
#      - title: Open in catalog
#        icon: catalog
#        entityRef: ${{ steps['register'].output.entityRef }}
@@ -1,7 +1,10 @@
{
  "extends": "@backstage/cli/config/tsconfig.json",
  "include": [
-    "packages/*/src"
+    "packages/*/src",
+    "plugins/*/src",
+    "plugins/*/dev",
+    "plugins/*/migrations"
  ],
  "exclude": ["node_modules"],
  "compilerOptions": {