diff --git a/argo-workflows/ci-workflow-using-clusterWorkflowTemplates.yaml b/argo-workflows/ci-workflow-using-clusterWorkflowTemplates.yaml new file mode 100644 index 0000000..60c7295 --- /dev/null +++ b/argo-workflows/ci-workflow-using-clusterWorkflowTemplates.yaml @@ -0,0 +1,78 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: ci-workflow-using-cluster-templates- + namespace: argo + labels: + workflows.argoproj.io/archive-strategy: "false" + annotations: + workflows.argoproj.io/description: | + This is a simple ci workflow that utilizes ClusterWorkflowTemplates. +spec: + entrypoint: ci + serviceAccountName: admin + volumeClaimTemplates: + - metadata: + name: shared-data + spec: + accessModes: ["ReadWriteOnce"] + resources: + requests: + storage: 1Gi + volumes: + - name: docker-config + secret: + secretName: my-docker-secret + templates: + - name: ci + dag: + tasks: + - name: git-clone + templateRef: + name: git-clone-template + template: git-clone + clusterScope: true + arguments: + parameters: + - name: git-repo + value: "https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/Franz.Germann/fibonacci_go.git" + - name: unit-tests + templateRef: + name: unit-tests-template + template: unit-tests + clusterScope: true + dependencies: [git-clone] + - name: lint-scan + templateRef: + name: lint-scan-template + template: lint-scan + clusterScope: true + dependencies: [git-clone] + - name: build + templateRef: + name: build-and-push-image-template + template: build + clusterScope: true + arguments: + parameters: + - name: dockerfile-name + value: "Dockerfile" + - name: image-destination + value: "gitea.cnoe.localtest.me/giteaadmin/fibonacci_go:latest" + dependencies: [unit-tests, lint-scan] + - name: trivy-filesystem-scan + templateRef: + name: trivy-filesystem-scan-template + template: trivy-filesystem-scan + clusterScope: true + dependencies: [git-clone] + - name: trivy-image-scan + templateRef: + name: trivy-image-scan-template + template: 
trivy-image-scan + clusterScope: true + arguments: + parameters: + - name: image + value: "gitea.cnoe.localtest.me/giteaadmin/fibonacci_go:latest" + dependencies: [build] diff --git a/argo-workflows/example-ci-workflow.yaml b/argo-workflows/example-ci-workflow.yaml index 3da946c..cc97e83 100644 --- a/argo-workflows/example-ci-workflow.yaml +++ b/argo-workflows/example-ci-workflow.yaml @@ -63,7 +63,9 @@ spec: image: alpine:3.20.3 command: [sh, -c] args: - - | + - | + set -e + ls -la / ls -la /shared-data ls -la /shared-data/repo @@ -77,6 +79,8 @@ spec: command: [sh, -c] args: - | + set -e + apt update apt install -y git git clone -b main https://forgejo.edf-bootstrap.cx.fg1.ffm.osc.live/Franz.Germann/fibonacci_go.git /shared-data/repo @@ -108,6 +112,8 @@ spec: command: [sh, -c] args: - | + set -e + cd /shared-data/repo go test ./... -v @@ -123,6 +129,8 @@ spec: command: [sh, -c] args: - | + set -e + cd /shared-data/repo golangci-lint run ./... --out-format=json --timeout 5m --issues-exit-code 1 @@ -137,6 +145,8 @@ spec: command: [sh, -c] args: - | + set -e + trivy fs --scanners license,vuln,misconfig,secret /shared-data/repo echo trivy-filesystem-scan task completed @@ -148,4 +158,11 @@ spec: container: image: aquasec/trivy:0.56.2 command: [sh, -c] - args: ["TRIVY_INSECURE=true trivy image --scanners vuln,secret,misconfig gitea.cnoe.localtest.me/giteaadmin/fibonacci_go:latest"] \ No newline at end of file + args: + - | + set -e + + TRIVY_INSECURE=true trivy image --scanners license,vuln,secret gitea.cnoe.localtest.me/giteaadmin/fibonacci_go:latest + TRIVY_INSECURE=true trivy image --image-config-scanners secret,misconfig gitea.cnoe.localtest.me/giteaadmin/fibonacci_go:latest + + echo trivy-image-scan task completed \ No newline at end of file diff --git a/argo-workflows/templates/build-and-push-image-template.yaml b/argo-workflows/templates/build-and-push-image-template.yaml new file mode 100644 index 0000000..d1b8e39 --- /dev/null +++ 
b/argo-workflows/templates/build-and-push-image-template.yaml @@ -0,0 +1,26 @@ +apiVersion: argoproj.io/v1alpha1 +kind: ClusterWorkflowTemplate +metadata: + name: build-and-push-image-template +spec: + entrypoint: build + templates: + - name: build + inputs: + parameters: + - name: dockerfile-name + - name: image-destination + container: + image: gcr.io/kaniko-project/executor:v1.23.2 + args: + [ + "--dockerfile={{inputs.parameters.dockerfile-name}}", + "--context=/shared-data/repo/", + "--destination={{inputs.parameters.image-destination}}", + "--skip-tls-verify" + ] + volumeMounts: + - name: shared-data + mountPath: /shared-data + - name: docker-config + mountPath: /kaniko/.docker/ \ No newline at end of file diff --git a/argo-workflows/templates/git-clone-template.yaml b/argo-workflows/templates/git-clone-template.yaml new file mode 100644 index 0000000..ba6c5f6 --- /dev/null +++ b/argo-workflows/templates/git-clone-template.yaml @@ -0,0 +1,26 @@ +apiVersion: argoproj.io/v1alpha1 +kind: ClusterWorkflowTemplate +metadata: + name: git-clone-template +spec: + entrypoint: git-clone + templates: + - name: git-clone + inputs: + parameters: + - name: git-repo + container: + image: ubuntu:24.10 + command: [sh, -c] + args: + - | + set -e + + apt update + apt install -y git + git clone -b main {{inputs.parameters.git-repo}} /shared-data/repo + + echo git-clone task completed + volumeMounts: + - name: shared-data + mountPath: /shared-data \ No newline at end of file diff --git a/argo-workflows/templates/lint-scan-template.yaml b/argo-workflows/templates/lint-scan-template.yaml new file mode 100644 index 0000000..ee82068 --- /dev/null +++ b/argo-workflows/templates/lint-scan-template.yaml @@ -0,0 +1,22 @@ +apiVersion: argoproj.io/v1alpha1 +kind: ClusterWorkflowTemplate +metadata: + name: lint-scan-template +spec: + entrypoint: lint-scan + templates: + - name: lint-scan + container: + image: golangci/golangci-lint:v1.61.0 + command: [sh, -c] + args: + - | + set -e + + cd 
/shared-data/repo + golangci-lint run ./... --out-format=json --timeout 5m --issues-exit-code 1 + + echo lint-scan task completed + volumeMounts: + - name: shared-data + mountPath: /shared-data \ No newline at end of file diff --git a/argo-workflows/templates/trivy-filesystem-scan-template.yaml b/argo-workflows/templates/trivy-filesystem-scan-template.yaml new file mode 100644 index 0000000..c33d970 --- /dev/null +++ b/argo-workflows/templates/trivy-filesystem-scan-template.yaml @@ -0,0 +1,21 @@ +apiVersion: argoproj.io/v1alpha1 +kind: ClusterWorkflowTemplate +metadata: + name: trivy-filesystem-scan-template +spec: + entrypoint: trivy-filesystem-scan + templates: + - name: trivy-filesystem-scan + container: + image: aquasec/trivy:0.56.2 + command: [sh, -c] + args: + - | + set -e + + trivy fs --scanners license,vuln,misconfig,secret /shared-data/repo + + echo trivy-filesystem-scan task completed + volumeMounts: + - name: shared-data + mountPath: /shared-data \ No newline at end of file diff --git a/argo-workflows/templates/trivy-image-scan-template.yaml b/argo-workflows/templates/trivy-image-scan-template.yaml new file mode 100644 index 0000000..d3152f5 --- /dev/null +++ b/argo-workflows/templates/trivy-image-scan-template.yaml @@ -0,0 +1,22 @@ +apiVersion: argoproj.io/v1alpha1 +kind: ClusterWorkflowTemplate +metadata: + name: trivy-image-scan-template +spec: + entrypoint: trivy-image-scan + templates: + - name: trivy-image-scan + inputs: + parameters: + - name: image + container: + image: aquasec/trivy:0.56.2 + command: [sh, -c] + args: + - | + set -e + + TRIVY_INSECURE=true trivy image --scanners license,vuln,secret {{inputs.parameters.image}} + TRIVY_INSECURE=true trivy image --image-config-scanners secret,misconfig {{inputs.parameters.image}} + + echo trivy-image-scan task completed \ No newline at end of file diff --git a/argo-workflows/templates/unit-tests-template.yaml b/argo-workflows/templates/unit-tests-template.yaml new file mode 100644 index 
0000000..5e996b8 --- /dev/null +++ b/argo-workflows/templates/unit-tests-template.yaml @@ -0,0 +1,22 @@ +apiVersion: argoproj.io/v1alpha1 +kind: ClusterWorkflowTemplate +metadata: + name: unit-tests-template +spec: + entrypoint: unit-tests + templates: + - name: unit-tests + container: + image: golang:1.23.2 + command: [sh, -c] + args: + - | + set -e + + cd /shared-data/repo + go test ./... -v + + echo unit-test task completed + volumeMounts: + - name: shared-data + mountPath: /shared-data \ No newline at end of file diff --git a/argo-workflows/thisOneWorks_spark.yaml b/argo-workflows/thisOneWorks_spark.yaml deleted file mode 100644 index 953fc61..0000000 --- a/argo-workflows/thisOneWorks_spark.yaml +++ /dev/null @@ -1,219 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - annotations: - argocd.argoproj.io/tracking-id: test:argoproj.io/Workflow:argo/test - kubectl.kubernetes.io/last-applied-configuration: | - {"apiVersion":"argoproj.io/v1alpha1","kind":"Workflow","metadata":{"annotations":{"argocd.argoproj.io/tracking-id":"test:argoproj.io/Workflow:argo/test"},"labels":{"entity-id":"test","env":"dev"},"name":"test","namespace":"argo"},"spec":{"action":"create","entrypoint":"main","serviceAccountName":"admin","templates":[{"name":"main","steps":[[{"name":"spark-job","template":"spark-job"}],[{"arguments":{"parameters":[{"name":"spark-job-name","value":"{{steps.spark-job.outputs.parameters.spark-job-name}}"}]},"name":"wait","template":"wait"}]]},{"inputs":{"parameters":[{"name":"spark-job-name"}]},"name":"wait","resource":{"action":"get","failureCondition":"status.applicationState.state == FAILED","manifest":"apiVersion: \"sparkoperator.k8s.io/v1beta2\"\nkind: SparkApplication\nmetadata:\n name: {{inputs.parameters.spark-job-name}}\n namespace: argo\n","successCondition":"status.applicationState.state == 
COMPLETED"}},{"name":"spark-job","outputs":{"parameters":[{"name":"spark-job-name","valueFrom":{"jsonPath":"{.metadata.name}"}}]},"resource":{"action":"create","manifest":"apiVersion: \"sparkoperator.k8s.io/v1beta2\"\nkind: SparkApplication\nmetadata:\n name: spark-pi-test\n namespace: argo\n labels:\n env: dev\n entity-id: test\nspec:\n type: Scala\n mode: cluster\n image: \"docker.io/apache/spark:v3.1.3\"\n imagePullPolicy: IfNotPresent\n mainClass: org.apache.spark.examples.SparkPi\n mainApplicationFile: \"local:///opt/spark/examples/jars/spark-examples_2.12-3.1.3.jar\"\n sparkVersion: \"3.1.1\"\n restartPolicy:\n type: Never\n volumes:\n - name: \"test-volume\"\n hostPath:\n path: \"/tmp\"\n type: Directory\n driver:\n cores: 1\n coreLimit: \"1200m\"\n memory: \"512m\"\n labels:\n version: 3.1.1\n serviceAccount: admin\n volumeMounts:\n - name: \"test-volume\"\n mountPath: \"/tmp\"\n executor:\n cores: 1\n instances: 1\n memory: \"512m\"\n labels:\n version: 3.1.1\n volumeMounts:\n - name: \"test-volume\"\n mountPath: \"/tmp\"\n","setOwnerReference":true}}]}} - workflows.argoproj.io/pod-name-format: v2 - creationTimestamp: "2024-10-16T10:24:01Z" - generation: 17 - labels: - entity-id: test - env: dev - workflows.argoproj.io/completed: "false" - workflows.argoproj.io/phase: Succeeded - name: test - namespace: argo - resourceVersion: "5041" - uid: 41ef434b-6002-4ccc-be25-424d8de6e69d -spec: - action: create - arguments: {} - entrypoint: main - serviceAccountName: admin - templates: - - name: main - steps: - - - name: spark-job - template: spark-job - - - arguments: - parameters: - - name: spark-job-name - value: '{{steps.spark-job.outputs.parameters.spark-job-name}}' - name: wait - template: wait - - inputs: - parameters: - - name: spark-job-name - name: wait - resource: - action: get - failureCondition: status.applicationState.state == FAILED - manifest: | - apiVersion: "sparkoperator.k8s.io/v1beta2" - kind: SparkApplication - metadata: - name: 
{{inputs.parameters.spark-job-name}} - namespace: argo - successCondition: status.applicationState.state == COMPLETED - - name: spark-job - outputs: - parameters: - - name: spark-job-name - valueFrom: - jsonPath: '{.metadata.name}' - resource: - action: create - manifest: | - apiVersion: "sparkoperator.k8s.io/v1beta2" - kind: SparkApplication - metadata: - name: spark-pi-test - namespace: argo - labels: - env: dev - entity-id: test - spec: - type: Scala - mode: cluster - image: "docker.io/apache/spark:v3.1.3" - imagePullPolicy: IfNotPresent - mainClass: org.apache.spark.examples.SparkPi - mainApplicationFile: "local:///opt/spark/examples/jars/spark-examples_2.12-3.1.3.jar" - sparkVersion: "3.1.1" - restartPolicy: - type: Never - volumes: - - name: "test-volume" - hostPath: - path: "/tmp" - type: Directory - driver: - cores: 1 - coreLimit: "1200m" - memory: "512m" - labels: - version: 3.1.1 - serviceAccount: admin - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" - executor: - cores: 1 - instances: 1 - memory: "512m" - labels: - version: 3.1.1 - volumeMounts: - - name: "test-volume" - mountPath: "/tmp" - setOwnerReference: true -status: - artifactGCStatus: - notSpecified: true - artifactRepositoryRef: - artifactRepository: {} - default: true - conditions: - - status: "False" - type: PodRunning - finishedAt: null - nodes: - test: - children: - - test-4218752377 - displayName: test - finishedAt: "2024-10-16T10:24:31Z" - id: test - name: test - outboundNodes: - - test-2776088435 - phase: Succeeded - progress: 2/2 - resourcesDuration: - cpu: 17 - memory: 17 - startedAt: "2024-10-16T10:24:01Z" - templateName: main - templateScope: local/test - type: Steps - test-930589316: - boundaryID: test - children: - - test-2776088435 - displayName: '[1]' - finishedAt: "2024-10-16T10:24:31Z" - id: test-930589316 - name: test[1] - nodeFlag: {} - phase: Succeeded - progress: 1/1 - resourcesDuration: - cpu: 16 - memory: 16 - startedAt: "2024-10-16T10:24:11Z" - templateScope: 
local/test - type: StepGroup - test-1871935052: - boundaryID: test - children: - - test-930589316 - displayName: spark-job - finishedAt: "2024-10-16T10:24:03Z" - hostNodeName: localdev-control-plane - id: test-1871935052 - name: test[0].spark-job - outputs: - exitCode: "0" - parameters: - - name: spark-job-name - value: spark-pi-test - valueFrom: - jsonPath: '{.metadata.name}' - phase: Succeeded - progress: 1/1 - resourcesDuration: - cpu: 1 - memory: 1 - startedAt: "2024-10-16T10:24:01Z" - templateName: spark-job - templateScope: local/test - type: Pod - test-2776088435: - boundaryID: test - displayName: wait - finishedAt: "2024-10-16T10:24:28Z" - hostNodeName: localdev-control-plane - id: test-2776088435 - inputs: - parameters: - - name: spark-job-name - value: spark-pi-test - name: test[1].wait - outputs: - exitCode: "0" - phase: Succeeded - progress: 1/1 - resourcesDuration: - cpu: 16 - memory: 16 - startedAt: "2024-10-16T10:24:11Z" - templateName: wait - templateScope: local/test - type: Pod - test-4218752377: - boundaryID: test - children: - - test-1871935052 - displayName: '[0]' - finishedAt: "2024-10-16T10:24:11Z" - id: test-4218752377 - name: test[0] - nodeFlag: {} - phase: Succeeded - progress: 2/2 - resourcesDuration: - cpu: 17 - memory: 17 - startedAt: "2024-10-16T10:24:01Z" - templateScope: local/test - type: StepGroup - phase: Succeeded - progress: 2/2 - resourcesDuration: - cpu: 17 - memory: 17 - startedAt: "2024-10-16T10:24:01Z" - taskResultsCompletionStatus: - test-1871935052: false \ No newline at end of file diff --git a/config.json b/config.json new file mode 100644 index 0000000..15bc556 --- /dev/null +++ b/config.json @@ -0,0 +1,7 @@ +{ + "auths": { + "https://gitea.cnoe.localtest.me": { + "auth": "REDACTED-do-not-commit-credentials-create-a-kubernetes-docker-registry-secret-instead" + } + } +} \ No newline at end of file