Merge pull request 'Release 1.0.0' (#5) from development into main

Reviewed-on: DevFW-CICD/stacks#5
This commit is contained in:
richardrobertreitz 2024-12-18 09:29:47 +00:00
commit d162c73a84
101 changed files with 4913 additions and 640 deletions

View file

@ -13,7 +13,7 @@ spec:
namespace: argocd
source:
path: registry
repoURL: 'https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder'
repoURL: 'https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder'
targetRevision: HEAD
project: default
syncPolicy:

View file

@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View file

@ -1,24 +0,0 @@
apiVersion: v2
name: forgejo-runner
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

View file

@ -1,7 +0,0 @@
{{- if not .Values.registration.enabled}}
You will have to manually create a secret with the registration token, since you have not specified the registration token in the values.yaml file.
To create a secret with the registration token, run the following command:
kubectl create secret generic {{ include "forgejo-runner.fullname" . }}-token --from-literal=token=<token>
{{- end}}

View file

@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "forgejo-runner.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "forgejo-runner.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "forgejo-runner.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "forgejo-runner.labels" -}}
helm.sh/chart: {{ include "forgejo-runner.chart" . }}
{{ include "forgejo-runner.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "forgejo-runner.selectorLabels" -}}
app.kubernetes.io/name: {{ include "forgejo-runner.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "forgejo-runner.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "forgejo-runner.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View file

@ -1,82 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
labels:
{{- include "forgejo-runner.labels" . | nindent 4 }}
name: {{ include "forgejo-runner.fullname" . }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
{{- include "forgejo-runner.selectorLabels" . | nindent 6 }}
strategy: {}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "forgejo-runner.labels" . | nindent 8 }}
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
volumes:
- name: docker-certs
emptyDir: {}
- name: runner-data
emptyDir: {}
initContainers:
- name: runner-register
image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
command: ["forgejo-runner", "register", "--no-interactive", "--token", $(RUNNER_SECRET), "--name", $(RUNNER_NAME), "--instance", $(FORGEJO_INSTANCE_URL)]
env:
- name: RUNNER_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: RUNNER_SECRET
valueFrom:
secretKeyRef:
name: {{ include "forgejo-runner.fullname" . }}-token
key: token
- name: FORGEJO_INSTANCE_URL
value: {{ .Values.forgejoUrl }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
volumeMounts:
- name: runner-data
mountPath: /data
containers:
- name: runner
image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
command: ["sh", "-c", "while ! nc -z localhost 2376 </dev/null; do echo 'waiting for docker daemon...'; sleep 5; done; forgejo-runner daemon"]
env:
- name: DOCKER_HOST
value: tcp://localhost:2376
- name: DOCKER_CERT_PATH
value: /certs/client
- name: DOCKER_TLS_VERIFY
value: "1"
volumeMounts:
- name: docker-certs
mountPath: /certs
- name: runner-data
mountPath: /data
- name: daemon
image: docker:23.0.6-dind
env:
- name: DOCKER_TLS_CERTDIR
value: /certs
securityContext:
privileged: true
volumeMounts:
- name: docker-certs
mountPath: /certs

View file

@ -1,13 +0,0 @@
{{- if .Values.registration.enabled }}
# Secret data.
# You will need to retrieve this from the web UI, and your Forgejo instance must be running v1.21+
# Alternatively, create this with
# kubectl create secret generic runner-secret --from-literal=token=your_offline_token_here
apiVersion: v1
stringData:
token: {{ .Values.registration.token }}
kind: Secret
metadata:
name: {{ include "forgejo-runner.fullname" . }}-token
namespace: {{ .Release.Namespace }}
{{- end }}

View file

@ -1,45 +0,0 @@
replicaCount: 2
image:
repository: code.forgejo.org/forgejo/runner
pullPolicy: IfNotPresent
tag: "3.5.1"
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
podAnnotations: {}
podLabels: {}
podSecurityContext: {}
# fsGroup: 2000
securityContext: {}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
resources:
limits:
cpu: 500m
memory: 128Mi
requests:
cpu: 100m
memory: 128Mi
nodeSelector: {}
tolerations: []
affinity: {}
registration:
enabled: false
token: ""
forgejoUrl: https://forgejo-domain

View file

@ -13,7 +13,7 @@ spec:
namespace: argocd
source:
path: stacks/core
repoURL: 'https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder'
repoURL: 'https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder'
targetRevision: HEAD
project: default
syncPolicy:

View file

@ -0,0 +1,24 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: local-backup
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
destination:
name: in-cluster
namespace: argocd
source:
path: stacks/local-backup
repoURL: 'https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder'
targetRevision: HEAD
project: default
syncPolicy:
automated:
prune: true
selfHeal: true
syncOptions:
- CreateNamespace=true

View file

@ -13,7 +13,7 @@ spec:
namespace: argocd
source:
path: stacks/monitoring
repoURL: 'https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder'
repoURL: 'https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder'
targetRevision: HEAD
project: default
syncPolicy:

View file

@ -13,7 +13,7 @@ spec:
namespace: argocd
source:
path: stacks/ref-implementation
repoURL: 'https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder'
repoURL: 'https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder'
targetRevision: HEAD
project: default
syncPolicy:

View file

@ -13,7 +13,7 @@ spec:
namespace: argocd
source:
path: stacks/second-cluster
repoURL: 'https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder'
repoURL: 'https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder'
targetRevision: HEAD
project: default
syncPolicy:

View file

@ -22,6 +22,6 @@ spec:
helm:
valueFiles:
- $values/stacks/core/argocd/values.yaml
- repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values

View file

@ -1,44 +0,0 @@
global:
domain: {{ .Values.edfbuilderTargetDomain }}
configs:
params:
server.insecure: true
server.basehref: /argocd
cm:
application.resourceTrackingMethod: annotation
timeout.reconciliation: 60s
resource.exclusions: |
- apiGroups:
- "*"
kinds:
- ProviderConfigUsage
accounts.provider-argocd: apiKey
rbac:
policy.csv: 'g, provider-argocd, role:admin'
tls:
certificates:
notifications:
enabled: false
dex:
enabled: false
server:
ingress:
enabled: true
ingressClassName: nginx
annotations:
nginx.ingress.kubernetes.io/backend-protocol: HTTP
nginx.ingress.kubernetes.io/rewrite-target: /$2
nginx.ingress.kubernetes.io/use-regex: "true"
nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
path: /argocd(/|$)(.*)
pathType: ImplementationSpecific
extraTls:
- hosts:
- {{ .Values.edfbuilderTargetDomain }}
secretName: argocd-net-tls

View file

@ -1,53 +0,0 @@
global:
domain: {{ .Values.edfbuilderTargetDomain }}
configs:
params:
server.insecure: true
server.basehref: /argocd
cm:
application.resourceTrackingMethod: annotation
timeout.reconciliation: 60s
resource.exclusions: |
- apiGroups:
- "*"
kinds:
- ProviderConfigUsage
- apiGroups:
- cilium.io
kinds:
- CiliumIdentity
clusters:
- "*"
accounts.provider-argocd: apiKey
rbac:
policy.csv: 'g, provider-argocd, role:admin'
tls:
certificates:
notifications:
enabled: false
dex:
enabled: false
server:
ingress:
enabled: true
ingressClassName: nginx
annotations:
dns.gardener.cloud/class: garden
dns.gardener.cloud/dnsnames: {{ .Values.edfbuilderTargetDomain }}
dns.gardener.cloud/ttl: "600"
nginx.ingress.kubernetes.io/backend-protocol: HTTP
nginx.ingress.kubernetes.io/rewrite-target: /$2
nginx.ingress.kubernetes.io/use-regex: "true"
nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
path: /argocd(/|$)(.*)
pathType: ImplementationSpecific
extraTls:
- hosts:
- {{ .Values.edfbuilderTargetDomain }}
secretName: argocd-net-tls

View file

@ -0,0 +1,33 @@
global:
domain: {{{ .Env.DOMAIN }}}
configs:
params:
server.insecure: true
server.basehref: /argocd
cm:
application.resourceTrackingMethod: annotation
timeout.reconciliation: 60s
resource.exclusions: |
- apiGroups:
- "*"
kinds:
- ProviderConfigUsage
- apiGroups:
- cilium.io
kinds:
- CiliumIdentity
clusters:
- "*"
accounts.provider-argocd: apiKey
rbac:
policy.csv: 'g, provider-argocd, role:admin'
tls:
certificates:
notifications:
enabled: false
dex:
enabled: false

View file

@ -17,7 +17,7 @@ spec:
namespace: crossplane-system
source:
path: stacks/core/crossplane-compositions
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
directory:
recurse: true

View file

@ -1,3 +1,4 @@
{{{ if eq .Env.CLUSTER_TYPE "kind" }}}
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
@ -17,5 +18,6 @@ spec:
namespace: crossplane-system
source:
path: stacks/core/crossplane-providers
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
{{{ end }}}

View file

@ -1,9 +1,9 @@
apiVersion: pkg.crossplane.io/v1
kind: Provider
metadata:
name: richardrobertreitz-provider-kind
name: provider-kind
spec:
package: forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/richardrobertreitz/provider-kind:v0.1.0
package: forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/devfw-cicd/provider-kind:v0.1.0
packagePullPolicy: IfNotPresent # Only download the package if it isn't in the cache.
revisionActivationPolicy: Automatic # Otherwise our Provider never gets active & healthy
revisionHistoryLimit: 1

View file

@ -1,9 +1,9 @@
apiVersion: pkg.crossplane.io/v1
kind: Provider
metadata:
name: richardrobertreitz-provider-shell
name: provider-shell
spec:
package: forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/richardrobertreitz/provider-shell:v0.1.0
package: forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/devfw-cicd/provider-shell:v0.1.1
packagePullPolicy: IfNotPresent # Only download the package if it isn't in the cache.
revisionActivationPolicy: Automatic # Otherwise our Provider never gets active & healthy
revisionHistoryLimit: 1

View file

@ -1,3 +0,0 @@
replicaCount: 1
forgejoUrl: http://forgejo-http.gitea.svc.cluster.local:3000

View file

@ -22,6 +22,6 @@ spec:
helm:
valueFiles:
- $values/stacks/core/forgejo/values.yaml
- repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values

View file

@ -1,75 +0,0 @@
redis-cluster:
enabled: false
postgresql:
enabled: false
postgresql-ha:
enabled: false
persistence:
enabled: true
size: 5Gi
test:
enabled: false
gitea:
admin:
existingSecret: gitea-credential
config:
database:
DB_TYPE: sqlite3
session:
PROVIDER: memory
cache:
ADAPTER: memory
queue:
TYPE: level
server:
DOMAIN: 'gitea.{{ .Values.edfbuilderTargetDomain }}'
ROOT_URL: 'https://gitea.{{ .Values.edfbuilderTargetDomain }}:443'
service:
ssh:
type: NodePort
nodePort: 32222
externalTrafficPolicy: Local
ingress:
# NOTE: The ingress is generated in a later step for the path-based routing feature. See: hack/argo-cd/generate-manifests.sh
enabled: true
className: nginx
annotations:
dns.gardener.cloud/class: garden
dns.gardener.cloud/dnsnames: gitea.{{ .Values.edfbuilderTargetDomain }}
dns.gardener.cloud/ttl: "600"
nginx.ingress.kubernetes.io/proxy-body-size: 512m
nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
hosts:
- host: gitea.{{ .Values.edfbuilderTargetDomain }}
paths:
- path: /
pathType: Prefix
tls:
- hosts:
- gitea.{{ .Values.edfbuilderTargetDomain }}
secretName: forgejo-net-tls
image:
pullPolicy: "IfNotPresent"
# Overrides the image tag whose default is the chart appVersion.
#tag: "8.0.3"
# Adds -rootless suffix to image name
rootless: true
forgejo:
runner:
enabled: true
image:
tag: latest
# replicas: 3
config:
runner:
labels:
- docker:docker://node:16-bullseye
- self-hosted:docker://ghcr.io/catthehacker/ubuntu:act-22.04
- ubuntu-22.04:docker://ghcr.io/catthehacker/ubuntu:act-22.04

View file

@ -25,8 +25,8 @@ gitea:
queue:
TYPE: level
server:
DOMAIN: 'gitea.{{ .Values.edfbuilderTargetDomain }}'
ROOT_URL: 'https://gitea.{{ .Values.edfbuilderTargetDomain }}:443'
DOMAIN: 'gitea.{{{ .Env.DOMAIN }}}'
ROOT_URL: 'https://gitea.{{{ .Env.DOMAIN }}}:443'
service:
ssh:
@ -34,23 +34,6 @@ service:
nodePort: 32222
externalTrafficPolicy: Local
ingress:
# NOTE: The ingress is generated in a later step for the path-based routing feature. See: hack/argo-cd/generate-manifests.sh
enabled: true
className: nginx
annotations:
nginx.ingress.kubernetes.io/proxy-body-size: 512m
nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
hosts:
- host: gitea.{{ .Values.edfbuilderTargetDomain }}
paths:
- path: /
pathType: Prefix
tls:
- hosts:
- gitea.{{ .Values.edfbuilderTargetDomain }}
secretName: forgejo-net-tls
image:
pullPolicy: "IfNotPresent"
# Overrides the image tag whose default is the chart appVersion.

View file

@ -0,0 +1,22 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: ingress-apps
namespace: argocd
labels:
example: ref-implementation
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
destination:
server: "https://kubernetes.default.svc"
source:
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/core/ingress-apps"
project: default
syncPolicy:
automated:
selfHeal: true
syncOptions:
- CreateNamespace=true

View file

@ -0,0 +1,31 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
annotations:
nginx.ingress.kubernetes.io/rewrite-target: /$2
nginx.ingress.kubernetes.io/use-regex: "true"
name: argo-workflows-ingress
namespace: argo
spec:
ingressClassName: nginx
rules:
- host: localhost
http:
paths:
- backend:
service:
name: argo-server
port:
name: web
path: /argo-workflows(/|$)(.*)
pathType: ImplementationSpecific
- host: {{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: argo-server
port:
name: web
path: /argo-workflows(/|$)(.*)
pathType: ImplementationSpecific

View file

@ -0,0 +1,32 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
annotations:
nginx.ingress.kubernetes.io/backend-protocol: HTTP
nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
nginx.ingress.kubernetes.io/rewrite-target: /$2
nginx.ingress.kubernetes.io/use-regex: "true"
{{{ if eq .Env.CLUSTER_TYPE "osc" }}}
dns.gardener.cloud/class: garden
dns.gardener.cloud/dnsnames: {{{ .Env.DOMAIN }}}
dns.gardener.cloud/ttl: "600"
{{{ end }}}
name: argocd-server
namespace: argocd
spec:
ingressClassName: nginx
rules:
- host: {{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: argocd-server
port:
number: 80
path: /argocd(/|$)(.*)
pathType: ImplementationSpecific
tls:
- hosts:
- {{{ .Env.DOMAIN }}}
secretName: argocd-net-tls

View file

@ -0,0 +1,28 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: backstage
namespace: backstage
spec:
ingressClassName: nginx
rules:
- host: localhost
http:
paths:
- backend:
service:
name: backstage
port:
name: http
path: /
pathType: Prefix
- host: {{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: backstage
port:
name: http
path: /
pathType: Prefix

View file

@ -0,0 +1,18 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: fibonacci-service
namespace: fibonacci-app
spec:
ingressClassName: nginx
rules:
- host: {{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: fibonacci-service
port:
number: 9090
path: /fibonacci
pathType: Prefix

View file

@ -0,0 +1,30 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
annotations:
nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
nginx.ingress.kubernetes.io/proxy-body-size: 512m
{{{ if eq .Env.CLUSTER_TYPE "osc" }}}
dns.gardener.cloud/class: garden
dns.gardener.cloud/dnsnames: gitea.{{{ .Env.DOMAIN }}}
dns.gardener.cloud/ttl: "600"
{{{ end }}}
name: forgejo
namespace: gitea
spec:
ingressClassName: nginx
rules:
- host: gitea.{{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: forgejo-http
port:
number: 3000
path: /
pathType: Prefix
tls:
- hosts:
- gitea.{{{ .Env.DOMAIN }}}
secretName: forgejo-net-tls

View file

@ -0,0 +1,28 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: keycloak-ingress-localhost
namespace: keycloak
spec:
ingressClassName: nginx
rules:
- host: localhost
http:
paths:
- backend:
service:
name: keycloak
port:
name: http
path: /keycloak
pathType: ImplementationSpecific
- host: {{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: keycloak
port:
name: http
path: /keycloak
pathType: ImplementationSpecific

View file

@ -0,0 +1,18 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: kube-prometheus-stack-grafana
namespace: monitoring
spec:
ingressClassName: nginx
rules:
- host: {{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: kube-prometheus-stack-grafana
port:
number: 80
path: /grafana
pathType: Prefix

View file

@ -0,0 +1,24 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: minio-console
namespace: minio-backup
{{{ if eq .Env.CLUSTER_TYPE "osc" }}}
annotations:
dns.gardener.cloud/class: garden
dns.gardener.cloud/dnsnames: minio-backup.{{{ .Env.DOMAIN }}}
dns.gardener.cloud/ttl: "600"
{{{ end }}}
spec:
ingressClassName: nginx
rules:
- host: minio-backup.{{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: minio-console
port:
number: 9001
path: /
pathType: Prefix

View file

@ -0,0 +1,24 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: openbao
namespace: openbao
{{{ if eq .Env.CLUSTER_TYPE "osc" }}}
annotations:
dns.gardener.cloud/class: garden
dns.gardener.cloud/dnsnames: openbao.{{{ .Env.DOMAIN }}}
dns.gardener.cloud/ttl: "600"
{{{ end }}}
spec:
ingressClassName: nginx
rules:
- host: openbao.{{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: openbao
port:
number: 8200
path: /
pathType: Prefix

View file

@ -22,6 +22,6 @@ spec:
helm:
valueFiles:
- $values/stacks/core/ingress-nginx/values.yaml
- repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values

View file

@ -1,16 +0,0 @@
controller:
updateStrategy:
type: RollingUpdate
rollingUpdate:
maxUnavailable: 1
ingressClassResource:
name: nginx
# added for idpbuilder
allowSnippetAnnotations: true
# added for idpbuilder
config:
proxy-buffer-size: 32k
use-forwarded-headers: "true"

View file

@ -3,12 +3,32 @@ controller:
type: RollingUpdate
rollingUpdate:
maxUnavailable: 1
ingressClassResource:
name: nginx
# added for idpbuilder
allowSnippetAnnotations: true
# added for idpbuilder
config:
proxy-buffer-size: 32k
use-forwarded-headers: "true"
# monitoring nginx
metrics:
enabled: true
serviceMonitor:
additionalLabels:
release: "ingress-nginx"
enabled: true
{{{ if eq .Env.CLUSTER_TYPE "kind" }}}
hostPort:
enabled: true
terminationGracePeriodSeconds: 0
service:
type: NodePort
watchIngressWithoutClass: true
nodeSelector:
ingress-ready: "true"
@ -26,11 +46,4 @@ controller:
publish-status-address: localhost
# added for idpbuilder
enable-ssl-passthrough: ""
# added for idpbuilder
allowSnippetAnnotations: true
# added for idpbuilder
config:
proxy-buffer-size: 32k
use-forwarded-headers: "true"
{{{ end }}}

View file

@ -0,0 +1,126 @@
# Local Backup with Velero and Minio
This example is adapted from the original idpbuilder stack.
Two significant changes were made from the original:
* disabled `hostPath` mount to persist backups within kind, since backups do not work sufficiently in this example due to PVC issues, see below.
* renamed `minio` namespace to `minio-backup` so it does not collide with other minio examples.
Within kind, it can only back up Kubernetes objects. Data from PVCs is skipped; see below for why.
[Velero](https://velero.io/) requires a compatible storage provider as its backup target. This local installation uses [MinIO](https://min.io/) as an example.
MinIO is not officially supported by Velero, but it works thanks to its S3 compatibility.
The current setup does NOT persist backups externally; it stores them in MinIO's PVCs. Proper backups should configure external storage, see [Supported Providers](https://velero.io/docs/main/supported-providers/).
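As a rough sketch of what "external storage" could mean here, the Velero chart's `backupStorageLocation` in its `values.yaml` (shown further down in this stack for MinIO) would be pointed at a real S3 bucket instead. Bucket name, secret name, and region below are placeholders, not part of this stack:
```yaml
# hypothetical values override: external S3 instead of the in-cluster MinIO
configuration:
  backupStorageLocation:
    - name: default
      provider: aws
      bucket: my-real-backup-bucket   # placeholder bucket name
      credential:
        name: cloud-credentials       # placeholder secret containing AWS keys
        key: aws
      config:
        region: eu-central-1          # placeholder region
```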
## Installation
The stack is installed as part of the `./example.sh` run.
In order to persist a local backup you have to mount a local directory within `main.go`:
```yaml
nodes:
- role: control-plane
extraMounts:
- hostPath: /some/path/backup # replace with your own path
containerPath: /backup
```
Kind creates the directory on the host, but you might have to adjust its permissions; otherwise the MinIO pod fails to start.
## Using it
After the installation, Velero and MinIO should be visible in ArgoCD.
During the installation, credentials for MinIO are generated and shared with Velero. You can access them manually:
```bash
kubectl -n minio-backup get secret root-creds -o go-template='{{ range $key, $value := .data }}{{ printf "%s: %s\n" $key ($value | base64decode) }}{{ end }}'
# example output
# rootPassword: aKKZzLnyry6OYZts17vMTf32H5ghFL4WYgu6bHujm
# rootUser: ge8019yksArb7BICt3MLY9
```
A bucket in MinIO was created, and Velero uses it for its backups by default; see the Helm `values.yaml` files.
### Backup and Restore
Backups and subsequent restores can be scheduled either via the Velero CLI or by creating CRD objects.
Check the `./demo` directory for equivalent CRD manifests.
Create a backup of the backstage namespace; see the `Schedule` sketch further below for more permanent setups:
```shell
velero backup create backstage-backup --include-namespaces backstage
```
There are more options to create a fine-granular backup and to set the backup storage.
See Velero's docs for details.
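For recurring backups (one of the TODOs below), a hedged sketch of a Velero `Schedule` CRD could look like this; the name and cron expression are placeholders and not part of this stack:
```yaml
# velero schedule create backstage-daily --schedule="0 3 * * *" --include-namespaces backstage
apiVersion: velero.io/v1
kind: Schedule
metadata:
  name: backstage-daily          # placeholder name
  namespace: velero
spec:
  schedule: "0 3 * * *"          # placeholder cron expression: daily at 03:00
  template:
    includedNamespaces:
      - 'backstage'
    ttl: 72h0m0s                 # keep each backup for three days
```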
Check the backup with:
```shell
velero backup get
```
To get more details on the backup you need to be able to connect to Velero's backup storage, i.e. MinIO.
Using `kubefwd` here helps a lot (this is not necessary for restore).
```shell
kubefwd services -n minio-backup
```
More details with `describe` and `logs`:
```shell
velero backup describe backstage-backup --details
velero backup logs backstage-backup
```
Restore the backup into the original namespace; you might want to delete the existing namespace beforehand:
```shell
kubectl delete namespace backstage
velero restore create --from-backup backstage-backup
```
When restoring, Velero does not replace existing objects in the restore target.
ArgoCD picks up on the changes and also validates that the restored resources are in sync.
## Issues with Persistent Volumes
Velero has no issue backing up Kubernetes objects like Deployments, ConfigMaps, etc., since they are just YAML/JSON definitions.
Volumes containing data are, however, more complex. The preferred type of backup is Kubernetes VolumeSnapshots, as they consistently store the state
of a volume at a given point in time in an atomic action. Those snapshots live within the cluster and are subsequently downloaded into one of Velero's
storage backends for safekeeping.
However, VolumeSnapshots are only possible on storage backends that support them via CSI drivers.
Backends like `nfs` or `hostPath` do NOT support them. Here, Velero uses an alternative method
called [File System Backups](https://velero.io/docs/main/file-system-backup/).
In essence, this is a simple copy operation based on the file system. Even though
this uses more sophisticated tooling under the hood, i.e. kopia, it is not
possible to create a backup in an atomic transaction. Thus, the resulting backup
might be inconsistent.
Furthermore, for file system backups to work velero installs a node-agent as a
DaemonSet on each Kubernetes node. The agent is aware of the node's internal
storage and accesses the directories on the host directly to copy the files.
This is not supported for hostPath volumes as they mount an arbitrary path
on the host. In theory, a backup is possible, but due to extra configuration and security
considerations it is intentionally skipped. Kind's local-path provisioner storage uses
a hostPath and is thus not supported for any kind of backup.
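If file system backups were enabled anyway (node-agent deployed via the chart's `deployNodeAgent` value), a volume could be opted in per pod annotation. This is only an illustrative sketch; pod, namespace, image, and volume names are placeholders:
```yaml
# hypothetical pod annotated for Velero's opt-in file-system backup
apiVersion: v1
kind: Pod
metadata:
  name: example-app                        # placeholder
  namespace: backstage
  annotations:
    backup.velero.io/backup-volumes: data  # back up the volume named "data" via FSB
spec:
  containers:
    - name: app
      image: nginx                         # placeholder image
      volumeMounts:
        - name: data
          mountPath: /data
  volumes:
    - name: data
      persistentVolumeClaim:
        claimName: example-data            # placeholder PVC
```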
## TODOs
* The MinIO backup installation is only intended as an example and must either
be configured properly or replaced.
* The current example does not automatically schedule backups.
* The Velero chart must be properly parameterized.

View file

@ -0,0 +1,9 @@
# velero backup create backstage-backup --include-namespaces backstage
apiVersion: velero.io/v1
kind: Backup
metadata:
name: backstage-backup
namespace: velero
spec:
includedNamespaces:
- 'backstage'

View file

@ -0,0 +1,10 @@
# velero restore create --from-backup backstage-backup
apiVersion: velero.io/v1
kind: Restore
metadata:
name: backstage-backup
namespace: velero
spec:
backupName: backstage-backup
includedNamespaces:
- 'backstage'

View file

@ -0,0 +1,33 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: minio
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
project: default
sources:
- repoURL: 'https://charts.min.io'
targetRevision: 5.0.15
helm:
releaseName: minio
valueFiles:
- $values/stacks/local-backup/minio/helm/values.yaml
chart: minio
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/local-backup/minio/manifests"
destination:
server: "https://kubernetes.default.svc"
namespace: minio-backup
syncPolicy:
syncOptions:
- CreateNamespace=true
automated:
selfHeal: true

View file

@ -0,0 +1,17 @@
replicas: 1
mode: standalone
resources:
requests:
memory: 128Mi
persistence:
enabled: true
storageClass: standard
size: 512Mi
# volumeName: backup # re-enable this to mount a local host path, see minio-pv.yaml
buckets:
- name: edfbuilder-backups
existingSecret: root-creds

View file

@ -0,0 +1,13 @@
# re-enable this config to mount a local host path, see `../helm/values.yaml`
# apiVersion: v1
# kind: PersistentVolume
# metadata:
# name: backup
# spec:
# storageClassName: standard
# accessModes:
# - ReadWriteOnce
# capacity:
# storage: 512Mi
# hostPath:
# path: /backup

View file

@ -0,0 +1,154 @@
apiVersion: v1
kind: ServiceAccount
metadata:
name: secret-sync
namespace: minio-backup
annotations:
argocd.argoproj.io/hook: Sync
argocd.argoproj.io/sync-wave: "-20"
---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: secret-sync
namespace: minio-backup
annotations:
argocd.argoproj.io/hook: Sync
argocd.argoproj.io/sync-wave: "-20"
rules:
- apiGroups: [""]
resources: ["secrets"]
verbs: ["get", "create", "update", "patch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: secret-sync
namespace: minio-backup
annotations:
argocd.argoproj.io/hook: Sync
argocd.argoproj.io/sync-wave: "-20"
subjects:
- kind: ServiceAccount
name: secret-sync
namespace: minio-backup
roleRef:
kind: Role
name: secret-sync
apiGroup: rbac.authorization.k8s.io
---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: secret-sync
namespace: velero
annotations:
argocd.argoproj.io/hook: Sync
argocd.argoproj.io/sync-wave: "-20"
rules:
- apiGroups: [""]
resources: ["secrets"]
verbs: ["get", "create", "update", "patch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: secret-sync
namespace: velero
annotations:
argocd.argoproj.io/hook: Sync
argocd.argoproj.io/sync-wave: "-20"
subjects:
- kind: ServiceAccount
name: secret-sync
namespace: minio-backup
roleRef:
kind: Role
name: secret-sync
apiGroup: rbac.authorization.k8s.io
---
apiVersion: batch/v1
kind: Job
metadata:
name: secret-sync
namespace: minio-backup
annotations:
argocd.argoproj.io/hook: PostSync
spec:
template:
metadata:
generateName: secret-sync
spec:
serviceAccountName: secret-sync
restartPolicy: Never
containers:
- name: kubectl
image: docker.io/bitnami/kubectl
command: ["/bin/bash", "-c"]
args:
- |
set -e
kubectl get secrets -n minio-backup root-creds -o json > /tmp/secret
ACCESS=$(jq -r '.data.rootUser | @base64d' /tmp/secret)
SECRET=$(jq -r '.data.rootPassword | @base64d' /tmp/secret)
echo \
"apiVersion: v1
kind: Secret
metadata:
name: secret-key
namespace: velero
type: Opaque
stringData:
aws: |
[default]
aws_access_key_id=${ACCESS}
aws_secret_access_key=${SECRET}
" > /tmp/secret.yaml
kubectl apply -f /tmp/secret.yaml
---
apiVersion: batch/v1
kind: Job
metadata:
name: minio-root-creds
namespace: minio-backup
annotations:
argocd.argoproj.io/hook: Sync
argocd.argoproj.io/sync-wave: "-10"
spec:
template:
metadata:
generateName: minio-root-creds
spec:
serviceAccountName: secret-sync
restartPolicy: Never
containers:
- name: kubectl
image: docker.io/bitnami/kubectl
command: ["/bin/bash", "-c"]
args:
- |
kubectl get secrets -n minio-backup root-creds
if [ $? -eq 0 ]; then
exit 0
fi
set -e
NAME=$(openssl rand -base64 24)
PASS=$(openssl rand -base64 36)
echo \
"apiVersion: v1
kind: Secret
metadata:
name: root-creds
namespace: minio-backup
type: Opaque
stringData:
rootUser: "${NAME}"
rootPassword: "${PASS}"
" > /tmp/secret.yaml
kubectl apply -f /tmp/secret.yaml

View file

@ -0,0 +1,31 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: velero
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
project: default
sources:
- repoURL: 'https://vmware-tanzu.github.io/helm-charts'
targetRevision: 8.0.0
helm:
releaseName: velero
valueFiles:
- $values/stacks/local-backup/velero/helm/values.yaml
chart: velero
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values
destination:
server: "https://kubernetes.default.svc"
namespace: velero
syncPolicy:
syncOptions:
- CreateNamespace=true
automated:
prune: true
selfHeal: true

View file

@ -0,0 +1,25 @@
resources:
requests:
memory: 128Mi
initContainers:
- name: velero-plugin-for-aws
image: velero/velero-plugin-for-aws:v1.11.0
imagePullPolicy: IfNotPresent
volumeMounts:
- mountPath: /target
name: plugins
# snapshotsEnabled: false # create snapshot crd?
# deployNodeAgent: true # install node agent as daemonset for file system backups?
configuration:
# defaultVolumesToFsBackup: true # backup pod volumes via fsb without explicit annotation?
backupStorageLocation:
- name: default
provider: aws
bucket: edfbuilder-backups
credential:
name: secret-key # this key is created within the minio-backup/secret-sync and injected into the velero namespace
key: aws
config:
region: minio
s3Url: http://minio.minio-backup.svc.cluster.local:9000 # internal resolution, external access for velero cli via fwd
s3ForcePathStyle: "true"

View file

@ -0,0 +1,25 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: grafana-dashboards
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
project: default
source:
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/monitoring/kube-prometheus/dashboards"
destination:
server: "https://kubernetes.default.svc"
namespace: monitoring
syncPolicy:
syncOptions:
- CreateNamespace=true
automated:
selfHeal: true
retry:
limit: -1

View file

@ -14,7 +14,7 @@ spec:
selfHeal: true
syncOptions:
- CreateNamespace=true
- ServerSideApply=true
- ServerSideApply=true # do not copy metadata, since its large size can lead to sync failures
destination:
name: in-cluster
namespace: monitoring
@ -25,6 +25,6 @@ spec:
helm:
valueFiles:
- $values/stacks/monitoring/kube-prometheus/values.yaml
- repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values

View file

@ -0,0 +1,268 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: grafana-dashboard-1
labels:
grafana_dashboard: "1"
data:
k8s-dashboard-01.json: |
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 1,
"links": [
],
"panels": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 0
},
"id": 5,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"expr": "{app=\"crossplane\"}",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: App crossplane",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 8
},
"id": 4,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"expr": "{app=\"argo-server\"}",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: App argo-server",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 16
},
"id": 3,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"expr": "{app=\"forgejo\"}",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: App forgejo",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 24
},
"id": 2,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"expr": "{app=\"backstage\"}",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: App backstage",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 32
},
"id": 1,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"expr": "{app=\"shoot-control-plane\"}",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: App shoot-control-plane",
"type": "logs"
}
],
"preload": false,
"schemaVersion": 40,
"tags": [
],
"templating": {
"list": [
]
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {
},
"timezone": "browser",
"title": "Loki Logs: Apps",
"uid": "ee4iuluru756of",
"version": 2,
"weekStart": ""
}

View file

@ -0,0 +1,845 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: grafana-dashboard-2
labels:
grafana_dashboard: "1"
data:
k8s-dashboard-02.json: |
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 30,
"links": [
],
"panels": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 0
},
"id": 19,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"server\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component server",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 8
},
"id": 17,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"repo-server\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component repo-server",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 16
},
"id": 16,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"redis\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component redis",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 24
},
"id": 15,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"query-frontend\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component query-frontend",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 32
},
"id": 14,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"querier\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component querier",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 40
},
"id": 13,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"prometheus-operator-webhook\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component prometheus-operator-webhook",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 48
},
"id": 12,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"prometheus-operator\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component prometheus-operator",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 56
},
"id": 11,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"metrics\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component metrics",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 64
},
"id": 10,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"kube-scheduler\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component kube-scheduler",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 72
},
"id": 9,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"kube-controller-manager\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component kube-controller-manager",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 80
},
"id": 8,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"kube-apiserver\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component kube-apiserver",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 88
},
"id": 7,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"ingester\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component ingester",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 96
},
"id": 6,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"gateway\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component gateway",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 104
},
"id": 5,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"etcd\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component etcd",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 112
},
"id": 4,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"distributor\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component distributor",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 120
},
"id": 3,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"controller\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component controller",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 128
},
"id": 2,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"cloud-infrastructure-controller\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component cloud-infrastructure-controller",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 136
},
"id": 1,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{component=\"applicationset-controller\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Component application-controller",
"type": "logs"
}
],
"preload": false,
"schemaVersion": 40,
"tags": [
],
"templating": {
"list": [
]
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {
},
"timezone": "browser",
"title": "Loki Logs: Components",
"uid": "ae4zuyp1kui9sc",
"version": 2,
"weekStart": ""
}

View file

@ -0,0 +1,537 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: grafana-dashboard-3
labels:
grafana_dashboard: "1"
data:
k8s-dashboard-03.json: |
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 31,
"links": [
],
"panels": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 0
},
"id": 11,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"repo-server\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container repo-server",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 8
},
"id": 10,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"promtail\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container promtail",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 16
},
"id": 9,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"prometheus\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container prometheus",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 24
},
"id": 8,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"postgres\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container postgres",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 32
},
"id": 7,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"kube-prometheus-stack\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container kube-prometheus-stack",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 40
},
"id": 6,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"keycloak\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container keycloak",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 48
},
"id": 5,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"grafana\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container grafana",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 56
},
"id": 4,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"forgejo\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container forgejo",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 64
},
"id": 3,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"crossplane\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container crossplane",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 72
},
"id": 2,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"backstage\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container backstage",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"fieldConfig": {
"defaults": {
},
"overrides": [
]
},
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 80
},
"id": 1,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"editorMode": "builder",
"expr": "{container=\"argo-server\"} |= ``",
"queryType": "range",
"refId": "A"
}
],
"title": "Logs: Container argo-server",
"type": "logs"
}
],
"preload": false,
"schemaVersion": 40,
"tags": [
],
"templating": {
"list": [
]
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {
},
"timezone": "browser",
"title": "Loki Logs: Container",
"uid": "ee50bcaehmv40e",
"version": 2,
"weekStart": ""
}

View file

@ -6,15 +6,40 @@ grafana:
userKey: admin-user
passwordKey: admin-password
defaultDashboardsTimezone: Europe/Berlin
additionalDataSources:
- name: Loki
type: loki
url: http://loki-loki-distributed-gateway.monitoring:80
# syncPolicy:
# syncOptions:
# - ServerSideApply=true
sidecar:
dashboards:
enabled: true
label: grafana_dashboard
folder: /tmp/dashboards
updateIntervalSeconds: 10
folderAnnotation: grafana_folder
provider:
allowUiUpdates: true
foldersFromFilesStructure: true
grafana.ini:
server:
domain: {{ .Values.edfbuilderTargetDomain }}
domain: {{{ .Env.DOMAIN }}}
root_url: "%(protocol)s://%(domain)s/grafana"
serve_from_sub_path: true
ingress:
serviceMonitor:
# If true, a ServiceMonitor CRD is created for a prometheus operator https://github.com/coreos/prometheus-operator
enabled: true
ingressClassName: nginx
hosts:
- {{ .Values.edfbuilderTargetDomain }}
path: /grafana
#monitoring nginx
prometheus:
prometheusSpec:
podMonitorSelectorNilUsesHelmValues: false
serviceMonitorSelectorNilUsesHelmValues: false

View file

@ -0,0 +1,34 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: loki
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
project: default
syncPolicy:
automated:
selfHeal: true
syncOptions:
- CreateNamespace=true
destination:
name: in-cluster
namespace: monitoring
sources:
- repoURL: https://github.com/grafana/helm-charts
path: charts/loki-distributed
targetRevision: HEAD
helm:
valueFiles:
- $values/stacks/monitoring/loki/values.yaml
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values
## consider using the following version, if it works again
#- repoURL: https://github.com/grafana/loki
# path: production/helm/loki

View file

@ -0,0 +1,7 @@
loki:
commonConfig:
replication_factor: 1
auth_enabled: false
# storageConfig:
# filesystem: null

View file

@ -0,0 +1,29 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: promtail
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
project: default
syncPolicy:
automated:
selfHeal: true
syncOptions:
- CreateNamespace=true
destination:
name: in-cluster
namespace: monitoring
sources:
- repoURL: https://github.com/grafana/helm-charts
path: charts/promtail
targetRevision: HEAD
helm:
valueFiles:
- $values/stacks/monitoring/promtail/values.yaml
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values

View file

@ -0,0 +1,45 @@
# -- Overrides the chart's name
nameOverride: null
# -- Overrides the chart's computed fullname
fullnameOverride: null
global:
# -- Allow parent charts to override registry hostname
imageRegistry: ""
# -- Allow parent charts to override registry credentials
imagePullSecrets: []
daemonset:
# -- Deploys Promtail as a DaemonSet
enabled: true
autoscaling:
# -- Creates a VerticalPodAutoscaler for the daemonset
enabled: false
deployment:
# -- Deploys Promtail as a Deployment
enabled: false
config:
enabled: true
logLevel: info
logFormat: logfmt
serverPort: 3101
clients:
- url: http://loki-loki-distributed-gateway/loki/api/v1/push
scrape_configs:
- job_name: authlog
static_configs:
- targets:
- authlog
labels:
job: authlog
__path__: /logs/auth.log
- job_name: syslog
static_configs:
- targets:
- syslog
labels:
job: syslog
__path__: /logs/syslog

View file

@ -30,7 +30,7 @@ idpbuilder create --use-path-routing \
--package https://github.com/cnoe-io/stacks//ref-implementation
```
This will take ~6 minutes for everything to come up. To track the progress, you can go to the [ArgoCD UI](https://{{ .Values.edfbuilderTargetDomain }}:8443/argocd/applications).
This will take ~6 minutes for everything to come up. To track the progress, you can go to the [ArgoCD UI](https://{{{ .Env.DOMAIN }}}:8443/argocd/applications).
### What was installed?
@ -47,15 +47,15 @@ The only package that cannot be removed this way is Keycloak because other packa
#### Accessing UIs
- Argo CD: https://{{ .Values.edfbuilderTargetDomain }}:8443/argocd
- Argo Workflows: https://{{ .Values.edfbuilderTargetDomain }}:8443/argo-workflows
- Backstage: https://{{ .Values.edfbuilderTargetDomain }}:8443/
- Gitea: https://{{ .Values.edfbuilderTargetDomain }}:8443/gitea
- Keycloak: https://{{ .Values.edfbuilderTargetDomain }}:8443/keycloak/admin/master/console/
- Argo CD: https://{{{ .Env.DOMAIN }}}:8443/argocd
- Argo Workflows: https://{{{ .Env.DOMAIN }}}:8443/argo-workflows
- Backstage: https://{{{ .Env.DOMAIN }}}:8443/
- Gitea: https://{{{ .Env.DOMAIN }}}:8443/gitea
- Keycloak: https://{{{ .Env.DOMAIN }}}:8443/keycloak/admin/master/console/
# Using it
For this example, we will walk through a few demonstrations. Once applications are ready, go to the [backstage URL](https://{{ .Values.edfbuilderTargetDomain }}:8443).
For this example, we will walk through a few demonstrations. Once applications are ready, go to the [backstage URL](https://{{{ .Env.DOMAIN }}}:8443).
Click on the Sign-In button, you will be asked to log into the Keycloak instance. There are two users set up in this
configuration, and their password can be retrieved with the following command:
@ -70,7 +70,7 @@ Both users use the same password retrieved above.
If you want to create a new user or change existing users:
1. Go to the [Keycloak UI](https://{{ .Values.edfbuilderTargetDomain }}:8443/keycloak/admin/master/console/).
1. Go to the [Keycloak UI](https://{{{ .Env.DOMAIN }}}:8443/keycloak/admin/master/console/).
Login with the username `cnoe-admin`. Password is the `KEYCLOAK_ADMIN_PASSWORD` field from the command above.
2. Select `cnoe` from the realms drop down menu.
3. Select users tab.
@ -97,9 +97,9 @@ You can click on the ArgoCD Application name to see more details.
### What just happened?
1. Backstage created [a git repository](https://{{ .Values.edfbuilderTargetDomain }}:8443/gitea/giteaAdmin/demo), then pushed templated contents to it.
2. Backstage created [an ArgoCD Application](https://{{ .Values.edfbuilderTargetDomain }}:8443/argocd/applications/argocd/demo?) and pointed it to the git repository.
3. Backstage registered the application as [a component](https://{{ .Values.edfbuilderTargetDomain }}:8443/gitea/giteaAdmin/demo/src/branch/main/catalog-info.yaml) in Backstage.
1. Backstage created [a git repository](https://{{{ .Env.DOMAIN }}}:8443/gitea/giteaAdmin/demo), then pushed templated contents to it.
2. Backstage created [an ArgoCD Application](https://{{{ .Env.DOMAIN }}}:8443/argocd/applications/argocd/demo?) and pointed it to the git repository.
3. Backstage registered the application as [a component](https://{{{ .Env.DOMAIN }}}:8443/gitea/giteaAdmin/demo/src/branch/main/catalog-info.yaml) in Backstage.
4. ArgoCD deployed the manifests stored in the repo to the cluster.
5. Backstage retrieved application health from ArgoCD API, then displayed it.

View file

@ -10,7 +10,7 @@ metadata:
spec:
project: default
source:
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/ref-implementation/argo-workflows/manifests/dev"
destination:

View file

@ -1,31 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: argo-workflows-ingress
namespace: argo
annotations:
nginx.ingress.kubernetes.io/use-regex: "true"
nginx.ingress.kubernetes.io/rewrite-target: /$2
spec:
ingressClassName: "nginx"
rules:
- host: localhost
http:
paths:
- path: /argo-workflows(/|$)(.*)
pathType: ImplementationSpecific
backend:
service:
name: argo-server
port:
name: web
- host: {{ .Values.edfbuilderTargetDomain }}
http:
paths:
- path: /argo-workflows(/|$)(.*)
pathType: ImplementationSpecific
backend:
service:
name: argo-server
port:
name: web

View file

@ -1,7 +1,6 @@
resources:
- ../base
- external-secret.yaml
- ingress.yaml
- sa-admin.yaml
patches:
- path: patches/cm-argo-workflows.yaml

View file

@ -7,14 +7,14 @@ data:
config: |
sso:
insecureSkipVerify: true
issuer: https://{{ .Values.edfbuilderTargetDomain }}/keycloak/realms/cnoe
issuer: https://{{{ .Env.DOMAIN }}}/keycloak/realms/cnoe
clientId:
name: keycloak-oidc
key: client-id
clientSecret:
name: keycloak-oidc
key: secret-key
redirectUrl: https://{{ .Values.edfbuilderTargetDomain }}:443/argo-workflows/oauth2/callback
redirectUrl: https://{{{ .Env.DOMAIN }}}:443/argo-workflows/oauth2/callback
rbac:
enabled: true
scopes:

View file

@ -10,7 +10,7 @@ metadata:
spec:
project: default
source:
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/ref-implementation/backstage-templates/entities"
directory:

View file

@ -20,7 +20,7 @@ metadata:
backstage.io/kubernetes-namespace: default
argocd/app-name: ${{values.name | dump}}
links:
- url: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443
- url: https://gitea.{{{ .Env.DOMAIN }}}:443
title: Repo URL
icon: github
spec:

View file

@ -100,7 +100,7 @@ spec:
input:
description: This is an example app
# Hard coded value for this demo purposes only.
repoUrl: gitea.{{ .Values.edfbuilderTargetDomain }}:443/?repo=${{parameters.name}}
repoUrl: gitea.{{{ .Env.DOMAIN }}}:443/?repo=${{parameters.name}}
defaultBranch: main
- id: create-argocd-app
name: Create ArgoCD App
@ -111,7 +111,7 @@ spec:
argoInstance: in-cluster
projectName: default
# necessary until we generate our own cert
repoUrl: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443/giteaAdmin/${{parameters.name}}
repoUrl: https://gitea.{{{ .Env.DOMAIN }}}:443/giteaAdmin/${{parameters.name}}
path: "kustomize/base"
- id: register
name: Register

View file

@ -14,7 +14,7 @@ metadata:
apache-spark.cnoe.io/label-selector: env=dev,entity-id=${{values.name}}
apache-spark.cnoe.io/cluster-name: local
links:
- url: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443
- url: https://gitea.{{{ .Env.DOMAIN }}}:443
title: Repo URL
icon: github
spec:

View file

@ -35,7 +35,7 @@ spec:
input:
description: This is an example app
# Hard coded value for this demo purposes only.
repoUrl: gitea.{{ .Values.edfbuilderTargetDomain }}:443/?repo=${{parameters.name}}
repoUrl: gitea.{{{ .Env.DOMAIN }}}:443/?repo=${{parameters.name}}
defaultBranch: main
- id: create-argocd-app
name: Create ArgoCD App
@ -46,7 +46,7 @@ spec:
argoInstance: in-cluster
projectName: default
# necessary until we generate our own cert
repoUrl: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443/giteaAdmin/${{parameters.name}}
repoUrl: https://gitea.{{{ .Env.DOMAIN }}}:443/giteaAdmin/${{parameters.name}}
path: "manifests"
- id: register
name: Register

View file

@ -10,7 +10,7 @@ metadata:
backstage.io/kubernetes-namespace: default
argocd/app-name: ${{values.name | dump}}
links:
- url: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443
- url: https://gitea.{{{ .Env.DOMAIN }}}:443
title: Repo URL
icon: github
spec:

View file

@ -31,7 +31,7 @@ spec:
input:
description: This is an example app
# Hard coded value for this demo purposes only.
repoUrl: gitea.{{ .Values.edfbuilderTargetDomain }}:443/?repo=${{parameters.name}}
repoUrl: gitea.{{{ .Env.DOMAIN }}}:443/?repo=${{parameters.name}}
defaultBranch: main
- id: create-argocd-app
name: Create ArgoCD App
@ -42,7 +42,7 @@ spec:
argoInstance: in-cluster
projectName: default
# necessary until we generate our own cert
repoUrl: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443/giteaAdmin/${{parameters.name}}
repoUrl: https://gitea.{{{ .Env.DOMAIN }}}:443/giteaAdmin/${{parameters.name}}
path: "manifests"
- id: register
name: Register

View file

@ -9,6 +9,7 @@ spec:
- ./argo-workflows/template.yaml
- ./app-with-bucket/template.yaml
- ./demo-go-hello-world/template.yaml
- ./spring-petclinic/template.yaml
---
apiVersion: backstage.io/v1alpha1
kind: Location

View file

@ -9,7 +9,7 @@ metadata:
backstage.io/kubernetes-label-selector: 'entity-id=${{ values.name }}'
backstage.io/kubernetes-namespace: gitea
links:
- url: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443
- url: https://gitea.{{{ .Env.DOMAIN }}}:443
title: Repo URL
icon: git
spec:
@ -26,7 +26,7 @@ metadata:
annotations:
backstage.io/techdocs-ref: dir:.
links:
- url: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443
- url: https://gitea.{{{ .Env.DOMAIN }}}:443
title: Gitea Repo
icon: git
spec:

View file

@ -33,7 +33,7 @@ spec:
name: Publish to Gitea
action: publish:gitea
input:
repoUrl: gitea.{{ .Values.edfbuilderTargetDomain }}:443/?repo=${{parameters.name}}
repoUrl: gitea.{{{ .Env.DOMAIN }}}:443/?repo=${{parameters.name}}
description: This is the repository for ${{ parameters.name }}
sourcePath: ./skeleton
defaultBranch: main

View file

@ -0,0 +1,94 @@
## Spring PetClinic Template
This template deploys a fork of Spring's PetClinic and showcases the entire
development workflow, from version control through build and deployment into
Kubernetes to monitoring the application at runtime.
The goal is to demonstrate the migration of an existing GitHub project into the
stack by applying only minimal changes for compatibility. These changes
include:
- Overwriting the CI workflows for compatibility with Forgejo Actions and to
work around container issues, making the builds containerless.
- Extending the Kubernetes manifests to use Ingress and Service objects
- Integrating a Prometheus monitoring endpoint
### Backstage - Forking
The Backstage template allows you to define a URL to 'fork' from. However,
the template does not actually create a git fork; it downloads the latest
`HEAD` of a given branch. The source repository has to be known to Backstage
so it can apply the correct download mechanism; see `backend.integrations` in
the Backstage config.
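For illustration, such an integration entry looks roughly like the following; the concrete `integrations` block used by this stack appears in the backstage manifests further down in this diff, and the host below is a placeholder:

```yaml
# Sketch only: a Gitea/Forgejo integration entry so Backstage can download the upstream code.
integrations:
  gitea:
    - baseUrl: https://gitea.example.com:443   # placeholder host
      host: gitea.example.com:443
      username: ${GITEA_USERNAME}
      password: ${GITEA_PASSWORD}
```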
In the first step, the original source code is downloaded from the given
upstream repository. Subsequently, overrides provided by the template are
copied into the codebase, replacing some workflow and deployment files.
This 'merged' codebase is pushed into a new git repository on the supplied
Forgejo instance. Additionally, an ArgoCD application is created that points
to this new repository.
### Forgejo - CI with Forgejo Actions
As soon as the patched codebase is pushed into the Forgejo git repository,
Forgejo Actions are triggered and start executing the three existing workflows.
However, only the two build workflows are patched to work within the current
stack; the third, deployment-oriented workflow fails because it cannot start a
kind cluster.
In the current configuration, workflows are by default executed in a minimal
node-debian container, which suffices in most cases to run GitHub Actions.
As in the original upstream repository on GitHub, a Gradle-based and a
Maven-based workflow are started. The Gradle one only executes a simple Java
build, while the Maven version also includes the creation of a container image
and a Trivy-based security scan.
Both workflows are patched because actions are referenced differently in
Forgejo Actions than in GitHub Actions. In GitHub Actions, actions are
referenced as paths to GitHub repositories. In Forgejo Actions, the same
notation instead refers to actions hosted on `code.forgejo.org`, even on
self-hosted instances. As only a small subset of actions has been ported to
`code.forgejo.org` due to licensing and compatibility (not all GitHub actions
work with Forgejo Actions), Forgejo Actions also allow referencing actions by
full URL. Thus, the action `https://github.com/actions/setup-java` instructs
the Forgejo runner to download the action from GitHub. (The default actions
repository can be overridden.)
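As a concrete illustration, a step referencing an action by full URL looks like the following; the same pattern is used by the Gradle and Maven workflows further down in this diff:

```yaml
# Sketch of action referencing in Forgejo Actions.
steps:
  - uses: actions/checkout@v4                       # short reference, resolved against code.forgejo.org
  - name: Set up JDK
    uses: https://github.com/actions/setup-java@v4  # full URL, downloaded from GitHub instead
    with:
      java-version: '17'
      distribution: 'adopt'
```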
Creating the application container within the Maven workflow is accomplished
without 'native' container tooling, i.e. Docker or Podman. Besides being
favorable because it introduces fewer side effects, this is necessary because
the current stack implementation does not yet support running nested
containers.
Furthermore, as the system uses self-signed certificates, certificate checks
are disabled throughout the system for now.
After a successful build, the container image is published into the Forgejo
container registry and can be pulled for deployment.
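The build step itself is a single Maven invocation of the Jib plugin, roughly as in the Maven workflow further down in this diff; registry host and image name are placeholders here, while the real workflow derives them from the runner environment and pushes with the job's credentials:

```yaml
# Sketch of the daemonless image build: Jib builds and pushes without Docker or Podman.
- name: Build image
  run: |
    ./mvnw com.google.cloud.tools:jib-maven-plugin:3.4.4:build \
      -Djib.allowInsecureRegistries=true \
      -Dimage=gitea.example.com/owner/repo:latest
```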
### ArgoCD - Deployment
The PetClinic consists of two components: a persistent PostgreSQL database and
the Java application that is compiled from source. On execution of the
Backstage template, an application deployment is created in ArgoCD, and the
resources defined in the newly created git repository are synchronized into the
Kubernetes cluster. However, as the Java application container has to be built
by the CI workflows first, the deployment will initially fail but becomes
successful as soon as the container image is available.
### Prometheus & Grafana - Monitoring
Prometheus and Grafana, among others, are deployed as part of the IDP
monitoring stack. To integrate with these existing components, the Backstage
template adds a ServiceMonitor definition to the PetClinic deployment. It
instructs Prometheus to scrape the `actuator/prometheus` endpoint at regular
intervals. The data contains JVM health metrics and can be visualized in
Grafana.
As the upstream PetClinic on GitHub does not contain the necessary dependencies
to enable the Prometheus endpoint, the app is by default bootstrapped from
a fork that contains the `micrometer-registry-prometheus` dependency.
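For reference, exposing that endpoint on the application side takes the usual Spring Boot Actuator settings in addition to the Micrometer dependency; a minimal sketch, assuming the fork relies on standard Actuator configuration (property names are Spring Boot defaults, not taken from this repository):

```yaml
# application.yaml sketch: expose the Prometheus endpoint so the ServiceMonitor described above can scrape it.
management:
  endpoints:
    web:
      exposure:
        include: health,prometheus   # serves metrics at /actuator/prometheus
```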

View file

@ -0,0 +1,32 @@
# This workflow will build a Java project with Gradle, and cache/restore any dependencies to improve the workflow execution time
# For more information see: https://docs.github.com/en/actions/use-cases-and-examples/building-and-testing/building-and-testing-java-with-gradle
name: Java CI with Gradle
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
java: [ '17' ]
steps:
- uses: actions/checkout@v4
- name: Set up JDK {% raw %}${{matrix.java}}{% endraw %}
uses: https://github.com/actions/setup-java@v4
with:
java-version: '{% raw %}${{matrix.java}}{% endraw %}'
distribution: 'adopt'
cache: maven
- name: Setup Gradle
uses: https://github.com/gradle/actions/setup-gradle@v4
- name: Build with Gradle
run: ./gradlew build

View file

@ -0,0 +1,63 @@
# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
# For more information see: https://docs.github.com/en/actions/use-cases-and-examples/building-and-testing/building-and-testing-java-with-maven
name: Java CI with Maven
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
runs-on: ubuntu-latest
#container:
# image: ghcr.io/catthehacker/ubuntu:act-latest # the large image
strategy:
matrix:
java: [ '17' ]
steps:
- uses: actions/checkout@v4
- name: Set up JDK {% raw %}${{matrix.java}}{% endraw %}
uses: https://github.com/actions/setup-java@v4
with:
java-version: '{% raw %}${{matrix.java}}{% endraw %}'
distribution: 'adopt'
cache: maven
- name: Build with Maven Wrapper
run: ./mvnw -B verify
- name: Build image
#run: ./mvnw spring-boot:build-image # the original image build
run: |
export CONTAINER_REPO=$(echo {% raw %}${{ env.GITHUB_REPOSITORY }}{% endraw %} | tr '[:upper:]' '[:lower:]')
./mvnw com.google.cloud.tools:jib-maven-plugin:3.4.4:build -Djib.allowInsecureRegistries=true -Dimage=gitea.{{{ .Env.DOMAIN }}}/${CONTAINER_REPO}:latest -Djib.to.auth.username={% raw %}${{ github.actor }}{% endraw %} -Djib.to.auth.password={% raw %}${{ secrets.PACKAGES_TOKEN }}{% endraw %}
- name: Build image as tar
run: |
./mvnw com.google.cloud.tools:jib-maven-plugin:3.4.4:buildTar -Djib.allowInsecureRegistries=true
# Separating the Trivy scan into another job is not necessary; it does, however, demonstrate Forgejo's compatibility with GitHub.
- uses: forgejo/upload-artifact@v4
with:
name: petclinic-image
path: target/jib-image.tar
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0 # This is necessary due to self signed certs for forgejo, proper setups can skip this
securityscan:
runs-on: ubuntu-latest
#container:
# image: aquasec/trivy # the container does not contain node...
steps:
- uses: forgejo/download-artifact@v4
with:
name: petclinic-image
env:
NODE_TLS_REJECT_UNAUTHORIZED: 0 # This is necessary due to self signed certs for forgejo, proper setups can skip this
- name: install trivy from deb package
run: |
wget -O trivy.deb https://github.com/aquasecurity/trivy/releases/download/v0.58.0/trivy_0.58.0_Linux-64bit.deb
DEBIAN_FRONTEND=noninteractive dpkg -i trivy.deb
- name: scan the image
run: trivy image --input jib-image.tar

View file

@ -0,0 +1,36 @@
---
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
name: ${{ values.name }}
description: This is a Backstage component created from the custom template that forks the PetClinic
annotations:
backstage.io/techdocs-ref: dir:.
backstage.io/kubernetes-label-selector: 'entity-id=${{ values.name }}'
backstage.io/kubernetes-namespace: gitea
links:
- url: https://gitea.{{{ .Env.DOMAIN }}}:443
title: Repo URL
icon: git
spec:
owner: guests
lifecycle: experimental
type: service
system: ${{ values.name | dump }}
---
apiVersion: backstage.io/v1alpha1
kind: System
metadata:
name: ${{ values.name | dump }}
description: A system for managing services created from the Gitea template.
annotations:
backstage.io/techdocs-ref: dir:.
links:
- url: https://gitea.{{{ .Env.DOMAIN }}}:443
title: Gitea Repo
icon: git
spec:
owner: guests
lifecycle: experimental
type: service

View file

@ -0,0 +1,76 @@
---
apiVersion: v1
kind: Secret
metadata:
name: demo-db
namespace: ${{ values.namespace }}
type: servicebinding.io/postgresql
stringData:
type: "postgresql"
provider: "postgresql"
host: "demo-db"
port: "5432"
database: "petclinic"
username: "user"
password: "pass"
---
apiVersion: v1
kind: Service
metadata:
name: demo-db
namespace: ${{ values.namespace }}
spec:
ports:
- port: 5432
selector:
app: demo-db
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: demo-db
namespace: ${{ values.namespace }}
labels:
app: demo-db
spec:
selector:
matchLabels:
app: demo-db
template:
metadata:
labels:
app: demo-db
spec:
containers:
- image: postgres:17
name: postgresql
env:
- name: POSTGRES_USER
valueFrom:
secretKeyRef:
name: demo-db
key: username
- name: POSTGRES_PASSWORD
valueFrom:
secretKeyRef:
name: demo-db
key: password
- name: POSTGRES_DB
valueFrom:
secretKeyRef:
name: demo-db
key: database
ports:
- containerPort: 5432
name: postgresql
livenessProbe:
tcpSocket:
port: postgresql
readinessProbe:
tcpSocket:
port: postgresql
startupProbe:
tcpSocket:
port: postgresql

View file

@ -0,0 +1,125 @@
---
apiVersion: v1
kind: Namespace
metadata:
name: ${{ values.namespace }}
---
apiVersion: v1
kind: Service
metadata:
name: petclinic
namespace: ${{ values.namespace }}
labels:
app: petclinic
spec:
type: ClusterIP
ports:
- port: 8080
targetPort: http
name: http
selector:
app: petclinic
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: ${{ values.namespace }}-petclinic
namespace: ${{ values.namespace }}
spec:
ingressClassName: nginx
rules:
- host: ${{ values.namespace }}.{{{ .Env.DOMAIN }}}
http:
paths:
- backend:
service:
name: petclinic
port:
name: http
path: /
pathType: Prefix
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: petclinic
namespace: ${{ values.namespace }}
labels:
app: petclinic
spec:
replicas: 1
selector:
matchLabels:
app: petclinic
template:
metadata:
labels:
app: petclinic
spec:
containers:
- name: workload
image: gitea.{{{ .Env.DOMAIN }}}/giteaadmin/${{ values.name }}
env:
- name: SPRING_PROFILES_ACTIVE
value: postgres
- name: POSTGRES_URL
value: jdbc:postgresql://demo-db/petclinic
- name: POSTGRES_USER
valueFrom:
secretKeyRef:
name: demo-db
key: username
- name: POSTGRES_PASS
valueFrom:
secretKeyRef:
name: demo-db
key: password
- name: SERVICE_BINDING_ROOT
value: /bindings
- name: SPRING_APPLICATION_JSON
value: |
{
"management.endpoint.health.probes.add-additional-paths": true
}
ports:
- name: http
containerPort: 8080
livenessProbe:
httpGet:
path: /livez
port: http
readinessProbe:
httpGet:
path: /readyz
port: http
volumeMounts:
- mountPath: /bindings/secret
name: binding
readOnly: true
volumes:
- name: binding
projected:
sources:
- secret:
name: demo-db
---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: ${{ values.namespace }}-petclinic-monitor
namespace: monitoring # Namespace where Prometheus is running
labels:
release: kube-prometheus-stack # this must match Prometheus' service monitor matching
spec:
selector:
matchLabels:
app: petclinic # Match your application's labels
namespaceSelector:
matchNames:
- ${{ values.namespace }} # Namespace where your app is running
endpoints:
- port: http # Name of the port in your Service
path: /actuator/prometheus # Path to your metrics endpoint
interval: 15s # How often to scrape metrics

View file

@ -0,0 +1,84 @@
apiVersion: scaffolder.backstage.io/v1beta3
kind: Template
metadata:
name: spring-petclinic
title: Spring PetClinic template
description: An example template for the scaffolder that creates a 'fork' of Spring's PetClinic
spec:
owner: user:guest
type: service
parameters:
- title: Fill in some steps
required:
- name
properties:
name:
title: Project Name
type: string
description: Unique name of the fork app
ui:autofocus: true
upstreamurl:
title: Repo to Fork
type: string
description: The URL of the repo to fork
default: https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/Patrick.Sy/ipcei-petclinic/src/branch/main
ui:emptyValue: 'https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/Patrick.Sy/ipcei-petclinic/src/branch/main'
steps:
- id: fetch-code # get the latest upstream code
name: Fetch Code
action: fetch:plain
input:
# url: https://github.com/spring-projects/spring-petclinic/tree/main
url: ${{ parameters.upstreamurl }}
- id: fetch-overrides # Apply specific overrides to add features and make modifications for compatibility
name: Fetch Overrides
action: fetch:template
input:
# url: ./skeleton/.github/workflows
# targetPath: ./.github/workflows
url: ./skeleton/
targetPath: ./
replace: true
values:
name: ${{ parameters.name }}
namespace: ${{ parameters.name }}
- id: publish
name: Publish to Gitea
action: publish:gitea
input:
repoUrl: gitea.{{{ .Env.DOMAIN }}}:443/?repo=${{parameters.name}}
description: This is the repository for ${{ parameters.name }}
sourcePath: ./
defaultBranch: main
- id: create-argocd-app
name: Create ArgoCD App
action: cnoe:create-argocd-app
input:
appName: ${{parameters.name}}
appNamespace: ${{parameters.name}}
argoInstance: in-cluster
projectName: default
# necessary until we generate our own cert
repoUrl: https://gitea.{{{ .Env.DOMAIN }}}:443/giteaAdmin/${{parameters.name}}
path: "k8s"
- id: register
name: Register in Catalog
action: catalog:register
input:
repoContentsUrl: ${{ steps['publish'].output.repoContentsUrl }}
catalogInfoPath: 'catalog-info.yaml'
output:
links:
- title: Repository
url: ${{ steps['publish'].output.remoteUrl }}
- title: Open in Catalog
icon: catalog
entityRef: ${{ steps['register'].output.entityRef }}

View file

@ -10,7 +10,7 @@ metadata:
spec:
project: default
source:
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/ref-implementation/backstage/manifests"
destination:

View file

@ -70,7 +70,7 @@ data:
app-config.yaml: |
app:
title: CNOE Backstage
baseUrl: https://{{ .Values.edfbuilderTargetDomain }}:443
baseUrl: https://{{{ .Env.DOMAIN }}}:443
organization:
name: CNOE
backend:
@ -80,7 +80,7 @@ data:
# auth:
# keys:
# - secret: ${BACKEND_SECRET}
baseUrl: https://{{ .Values.edfbuilderTargetDomain }}:443
baseUrl: https://{{{ .Env.DOMAIN }}}:443
listen:
port: 7007
# Uncomment the following host directive to bind to specific interfaces
@ -90,7 +90,7 @@ data:
# Content-Security-Policy directives follow the Helmet format: https://helmetjs.github.io/#reference
# Default Helmet Content-Security-Policy values can be removed by setting the key to false
cors:
origin: https://{{ .Values.edfbuilderTargetDomain }}:443
origin: https://{{{ .Env.DOMAIN }}}:443
methods: [GET, HEAD, PATCH, POST, PUT, DELETE]
credentials: true
database:
@ -106,14 +106,16 @@ data:
integrations:
gitea:
- baseUrl: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443
host: gitea.{{ .Values.edfbuilderTargetDomain }}:443
- baseUrl: https://gitea.{{{ .Env.DOMAIN }}}:443
host: gitea.{{{ .Env.DOMAIN }}}:443
username: ${GITEA_USERNAME}
password: ${GITEA_PASSWORD}
- baseUrl: https://gitea.{{ .Values.edfbuilderTargetDomain }}
host: gitea.{{ .Values.edfbuilderTargetDomain }}
- baseUrl: https://gitea.{{{ .Env.DOMAIN }}}
host: gitea.{{{ .Env.DOMAIN }}}
username: ${GITEA_USERNAME}
password: ${GITEA_PASSWORD}
- baseUrl: https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live
host: forgejo.edf-bootstrap.cx.fg1.ffm.osc.live
# github:
# - host: github.com
# apps:
@ -165,7 +167,7 @@ data:
locations:
# Examples from a public GitHub repository.
- type: url
target: https://gitea.{{ .Values.edfbuilderTargetDomain }}:443/giteaAdmin/edfbuilder/raw/branch/main/stacks/ref-implementation/backstage-templates/entities/catalog-info.yaml
target: https://gitea.{{{ .Env.DOMAIN }}}:443/giteaAdmin/edfbuilder/raw/branch/main/stacks/ref-implementation/backstage-templates/entities/catalog-info.yaml
rules:
- allow: [Component, System, API, Resource, Location, Template, User, Group]
kubernetes:
@ -180,7 +182,7 @@ data:
- type: 'config'
instances:
- name: in-cluster
url: https://{{ .Values.edfbuilderTargetDomain }}:443/argocd
url: https://{{{ .Env.DOMAIN }}}:443/argocd
username: admin
password: ${ARGOCD_ADMIN_PASSWORD}
argoWorkflows:
@ -336,6 +338,7 @@ spec:
volumeMounts:
- name: data
mountPath: /var/lib/postgresql/data
subPath: postgres
volumeClaimTemplates:
- metadata:
name: data
@ -373,14 +376,14 @@ spec:
template:
engineVersion: v2
data:
BACKSTAGE_FRONTEND_URL: https://{{ .Values.edfbuilderTargetDomain }}:443/backstage
BACKSTAGE_FRONTEND_URL: https://{{{ .Env.DOMAIN }}}:443/backstage
POSTGRES_HOST: postgresql.backstage.svc.cluster.local
POSTGRES_PORT: '5432'
POSTGRES_DB: backstage
POSTGRES_USER: backstage
POSTGRES_PASSWORD: "{{.POSTGRES_PASSWORD}}"
ARGO_WORKFLOWS_URL: https://{{ .Values.edfbuilderTargetDomain }}:443/argo-workflows
KEYCLOAK_NAME_METADATA: https://{{ .Values.edfbuilderTargetDomain }}:443/keycloak/realms/cnoe/.well-known/openid-configuration
ARGO_WORKFLOWS_URL: https://{{{ .Env.DOMAIN }}}:443/argo-workflows
KEYCLOAK_NAME_METADATA: https://{{{ .Env.DOMAIN }}}:443/keycloak/realms/cnoe/.well-known/openid-configuration
KEYCLOAK_CLIENT_SECRET: "{{.BACKSTAGE_CLIENT_SECRET}}"
ARGOCD_AUTH_TOKEN: "argocd.token={{.ARGOCD_SESSION_TOKEN}}"
ARGO_CD_URL: 'https://argocd-server.argocd.svc.cluster.local/api/v1/'
@ -424,32 +427,3 @@ spec:
remoteRef:
key: gitea-credential
property: password
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: backstage
namespace: backstage
spec:
ingressClassName: "nginx"
rules:
- host: localhost
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: backstage
port:
name: http
- host: {{ .Values.edfbuilderTargetDomain }}
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: backstage
port:
name: http

View file

@ -69,5 +69,5 @@ From here on, you can follow the instructions in the [README](./README.md) file.
echo https://${CODESPACE_NAME}-8080.${GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN}
```
For example, if you need to access Argo Workflows UI, instead of going to `https://{{ .Values.edfbuilderTargetDomain }}:8443/argo`,
For example, if you need to access Argo Workflows UI, instead of going to `https://{{{ .Env.DOMAIN }}}:8443/argo`,
you go to `https://${CODESPACE_NAME}-8080.${GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN}/argo`

View file

@ -12,7 +12,7 @@ spec:
namespace: external-secrets
server: "https://kubernetes.default.svc"
source:
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/ref-implementation/external-secrets/manifests"
project: default

View file

@ -0,0 +1,9 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
namespace: openbao
name: external-secrets-role
rules:
- apiGroups: [""]
resources: ["secrets"]
verbs: ["get", "list"]

View file

@ -0,0 +1,13 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: external-secrets-rolebinding
namespace: openbao
subjects:
- kind: ServiceAccount
name: external-secrets
namespace: external-secrets
roleRef:
kind: Role
name: external-secrets-role
apiGroup: rbac.authorization.k8s.io

View file

@ -0,0 +1,20 @@
# cluster-store.yaml
apiVersion: external-secrets.io/v1beta1
kind: SecretStore #Kubernetes resource type
metadata:
name: bao-backend #resource name
namespace: openbao
spec:
provider:
vault: #specifies vault as the provider
# server: "http://10.244.0.28:8200" # how to map it dynamically?
server: "http://openbao.openbao.svc.cluster.local:8200"
path: "data" #path for accessing the secrets
version: "v1" #Vault API version
auth:
tokenSecretRef:
name: "vault-token" #Use a secret called vault-token
key: "token" #THIS REFERENCES THE INITIAL TOKEN NOW SAVED AS A K8 SECRET
# openbao-0.openbao.pod.cluster.local
# 10.96.59.250:8200

View file

@ -0,0 +1,25 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: fibonacci-app
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
project: default
source:
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/ref-implementation/fibonacci-app"
destination:
server: "https://kubernetes.default.svc"
namespace: fibonacci-app
syncPolicy:
syncOptions:
- CreateNamespace=true
automated:
selfHeal: true
retry:
limit: -1

View file

@ -0,0 +1,20 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: fibonacci-deployment
namespace: fibonacci-app
spec:
replicas: 1
selector:
matchLabels:
app: fibonacci-go
template:
metadata:
labels:
app: fibonacci-go
spec:
containers:
- name: fibonacci-go
image: forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/christopher.hase/fibonacci_http_go:1.0.0
ports:
- containerPort: 9090

View file

@ -0,0 +1,13 @@
apiVersion: v1
kind: Service
metadata:
name: fibonacci-service
namespace: fibonacci-app
spec:
selector:
app: fibonacci-go
ports:
- protocol: TCP
port: 9090
targetPort: 9090
type: ClusterIP

View file

@ -18,12 +18,12 @@ spec:
name: in-cluster
namespace: gitea
sources:
- repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
- repoURL: https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/DevFW-CICD/forgejo-runner.git
path: forgejo-runner
targetRevision: HEAD
helm:
valueFiles:
- $values/stacks/core/forgejo-runner/values.yaml
- repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
- $values/stacks/ref-implementation/forgejo-runner/values.yaml
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values

View file

@ -0,0 +1,14 @@
replicaCount: 1
image:
repository: code.forgejo.org/forgejo/runner
pullPolicy: IfNotPresent
tag: "3.5.1"
resources:
limits:
memory: 2Gi
requests:
memory: 1Gi
forgejoUrl: http://forgejo-http.gitea.svc.cluster.local:3000

View file

@ -12,7 +12,7 @@ spec:
namespace: keycloak
server: "https://kubernetes.default.svc"
source:
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
path: "stacks/ref-implementation/keycloak/manifests"
project: default

View file

@ -1,30 +0,0 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: keycloak-ingress-localhost
namespace: keycloak
annotations:
argocd.argoproj.io/sync-wave: "100"
spec:
ingressClassName: "nginx"
rules:
- host: localhost
http:
paths:
- path: /keycloak
pathType: ImplementationSpecific
backend:
service:
name: keycloak
port:
name: http
- host: {{ .Values.edfbuilderTargetDomain }}
http:
paths:
- path: /keycloak
pathType: ImplementationSpecific
backend:
service:
name: keycloak
port:
name: http

View file

@ -83,7 +83,7 @@ data:
proxy=edge
# hostname configuration
hostname={{ .Values.edfbuilderTargetDomain }}
hostname={{{ .Env.DOMAIN }}}
http-relative-path=keycloak
# the admin url requires its own configuration to reflect correct url
@ -151,6 +151,7 @@ spec:
volumeMounts:
- name: data
mountPath: /var/lib/postgresql/data
subPath: postgres
volumeClaimTemplates:
- metadata:
name: data

View file

@ -145,7 +145,7 @@ data:
"rootUrl": "",
"baseUrl": "",
"redirectUris": [
"https://{{ .Values.edfbuilderTargetDomain }}:443/argo-workflows/oauth2/callback"
"https://{{{ .Env.DOMAIN }}}:443/argo-workflows/oauth2/callback"
],
"webOrigins": [
"/*"
@ -174,7 +174,7 @@ data:
"rootUrl": "",
"baseUrl": "",
"redirectUris": [
"https://{{ .Values.edfbuilderTargetDomain }}:443/api/auth/keycloak-oidc/handler/frame"
"https://{{{ .Env.DOMAIN }}}:443/api/auth/keycloak-oidc/handler/frame"
],
"webOrigins": [
"/*"
@ -225,7 +225,12 @@ spec:
ADMIN_PASSWORD=$(cat /var/secrets/KEYCLOAK_ADMIN_PASSWORD)
USER1_PASSWORD=$(cat /var/secrets/USER_PASSWORD)
{{{ if eq .Env.CLUSTER_TYPE "kind" }}}
KEYCLOAK_URL=http://keycloak.keycloak.svc.cluster.local:8080/keycloak
{{{ end }}}
{{{ if eq .Env.CLUSTER_TYPE "osc" }}}
KEYCLOAK_URL=https://{{{ .Env.DOMAIN }}}/keycloak
{{{ end }}}
KEYCLOAK_TOKEN=$(curl -sS --fail-with-body -X POST -H "Content-Type: application/x-www-form-urlencoded" \
--data-urlencode "username=cnoe-admin" \

View file

@ -1,3 +1,4 @@
{{{ if eq .Env.CLUSTER_TYPE "kind" }}}
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
@ -27,3 +28,4 @@ spec:
selfHeal: true
syncOptions:
- CreateNamespace=true
{{{ end }}}

View file

@ -0,0 +1,34 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: openbao
namespace: argocd
labels:
env: dev
finalizers:
- resources-finalizer.argocd.argoproj.io
spec:
project: default
syncPolicy:
automated:
selfHeal: false
syncOptions:
- CreateNamespace=true
destination:
name: in-cluster
namespace: openbao
sources:
- repoURL: https://github.com/openbao/openbao-helm.git
path: charts/openbao
targetRevision: HEAD
helm:
valueFiles:
- $values/stacks/ref-implementation/openbao/values.yaml
- repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD
ref: values
ignoreDifferences:
- group: admissionregistration.k8s.io
kind: MutatingWebhookConfiguration
jqPathExpressions:
- .webhooks[]?.clientConfig.caBundle

View file

@ -0,0 +1,17 @@
server:
postStart:
- sh
- -c
- |
sleep 10
bao operator init >> /tmp/init.txt
cat /tmp/init.txt | grep "Key " | awk '{print $NF}' | xargs -I{} bao operator unseal {}
echo $(grep "Initial Root Token:" /tmp/init.txt | awk '{print $NF}')| cat > /openbao/data/initial_token.txt
echo $(grep "Unseal Key 1:" /tmp/init.txt | awk '{print $NF}')| cat > /openbao/data/unseal_key1.txt
echo $(grep "Unseal Key 2:" /tmp/init.txt | awk '{print $NF}')| cat > /openbao/data/unseal_key2.txt
echo $(grep "Unseal Key 3:" /tmp/init.txt | awk '{print $NF}')| cat > /openbao/data/unseal_key3.txt
echo $(grep "Unseal Key 4:" /tmp/init.txt | awk '{print $NF}')| cat > /openbao/data/unseal_key4.txt
echo $(grep "Unseal Key 5:" /tmp/init.txt | awk '{print $NF}')| cat > /openbao/data/unseal_key5.txt
rm /tmp/init.txt
ui:
enabled: true

View file

@ -19,5 +19,5 @@ spec:
namespace: crossplane-system
source:
path: stacks/second-cluster/create-new-cluster-guestbook
repoURL: https://gitea.{{ .Values.edfbuilderTargetDomain }}/giteaAdmin/edfbuilder
repoURL: https://gitea.{{{ .Env.DOMAIN }}}/giteaAdmin/edfbuilder
targetRevision: HEAD

Some files were not shown because too many files have changed in this diff.