initial commit

franz.germann1 2024-10-18 14:26:36 +02:00
parent ce54891525
commit 57d72b762e
11 changed files with 527 additions and 18 deletions

17
.gitignore vendored

@@ -1,7 +1,3 @@
# ---> Go
# If you prefer the allow list template instead of the deny list, see community template:
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
#
# Binaries for programs and plugins
*.exe
*.exe~
@@ -9,19 +5,8 @@
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
# Go workspace file
go.work
go.work.sum
# env file
.env

1
.sonarcloud.properties Normal file

@@ -0,0 +1 @@

10
.travis.yml Normal file

@@ -0,0 +1,10 @@
language: go
go:
- 1.10.x
go_import_path: github.com/t-pwk/go-fibonacci
before_script:
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
script:
- go test -v -covermode=count -coverprofile=coverage.out ./...
- "$HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci -repotoken $COVERALLS_TOKEN"

21
LICENSE Normal file

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 Tom

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md

@@ -1,3 +1,47 @@
# fibonacci_go
# High-performance Fibonacci numbers implementation in Go
[![Build Status](https://travis-ci.com/T-PWK/go-fibonacci.svg?branch=v1.0.0)](https://travis-ci.com/T-PWK/go-fibonacci)
[![GitHub issues](https://img.shields.io/github/issues/T-PWK/go-fibonacci.svg)](https://github.com/T-PWK/go-fibonacci/issues)
[![Go Report Card](https://goreportcard.com/badge/github.com/T-PWK/go-fibonacci)](https://goreportcard.com/report/github.com/T-PWK/go-fibonacci)
[![Coverage Status](https://coveralls.io/repos/github/T-PWK/go-fibonacci/badge.svg?branch=v1.0.0)](https://coveralls.io/github/T-PWK/go-fibonacci?branch=master)
[![GoDoc](https://godoc.org/github.com/T-PWK/go-fibonacci?status.svg)](https://godoc.org/github.com/T-PWK/go-fibonacci)
[![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://blog.abelotech.com/mit-license/)
In mathematics, the Fibonacci numbers are the numbers in the following integer sequence, called the Fibonacci sequence, and characterized by the fact that every number after the first two is the sum of the two preceding ones:
```
1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, ...
```
Often, especially in modern usage, the sequence is extended by one more initial term:
```
0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, ...
```
This implementation has two methods: `Fibonacci` and `FibonacciBig`.
The `Fibonacci` function is more efficient; however, it returns correct values only for sequence numbers 0 through 93 (inclusive), because larger results overflow `uint64`. The `FibonacciBig` function, on the other hand, is less efficient but can return practically any Fibonacci number.
Example:
```go
package main

import (
    "fmt"

    "github.com/t-pwk/go-fibonacci"
)

func main() {
    fmt.Println("20: ", fib.Fibonacci(20))
    fmt.Println("200: ", fib.FibonacciBig(200))
}
```
And the output is
```
20: 6765
200: 280571172992510140037611932413038677189525
```
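
The 0–93 cutoff mentioned above can be checked directly. The following sketch is not part of the committed files; it reuses the import path from the README and adds an explicit `fib` alias purely for readability:

```go
package main

import (
    "fmt"

    fib "github.com/t-pwk/go-fibonacci"
)

func main() {
    // Fibonacci(93) is the largest sequence number whose value still fits in a uint64.
    fmt.Println("93:", fib.Fibonacci(93))

    // From sequence number 94 onwards the uint64 result would overflow,
    // so FibonacciBig, which returns a *big.Int, is the safe choice.
    fmt.Println("94:", fib.FibonacciBig(94))
}
```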


@@ -0,0 +1,73 @@
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
generateName: example-ci-workflow-
namespace: argo
labels:
workflows.argoproj.io/archive-strategy: "false"
annotations:
workflows.argoproj.io/description: |
This is a simple workflow to show what steps we need to take to deploy an application.
spec:
entrypoint: ci
serviceAccountName: admin
volumes:
- name: shared-data
emptyDir: {}
templates:
- name: ci
dag:
tasks:
- name: git-clone
template: simple-container
arguments:
parameters: [{name: message, value: "git-clone task completed"}]
- name: ls
template: ls
dependencies: [git-clone]
- name: build
template: simple-container
arguments:
parameters: [{name: message, value: "build task completed"}]
dependencies: [unit-tests, lint-scan]
- name: unit-tests
template: simple-container
arguments:
parameters: [{name: message, value: "unit-tests task completed"}]
dependencies: [ls]
- name: lint-scan
template: simple-container
arguments:
parameters: [{name: message, value: "lint-scan task completed"}]
dependencies: [ls]
- name: trivy-image-scan
template: simple-container
arguments:
parameters: [{name: message, value: "trivy-image-scan task completed"}]
dependencies: [build]
- name: trivy-filesystem-scan
template: simple-container
arguments:
parameters: [{name: message, value: "trivy-filesystem-scan task completed"}]
dependencies: [git-clone]
- name: push-image
template: simple-container
arguments:
parameters: [{name: message, value: "push-image task completed"}]
# when: " == true"
dependencies: [trivy-image-scan, trivy-filesystem-scan]
- name: simple-container
inputs:
parameters:
- name: message
container:
image: alpine:latest
command: [sh, -c]
args: ["echo {{inputs.parameters.message}}"]
- name: ls
container:
image: alpine:latest
command: [sh, -c]
args: [ls /]


@@ -0,0 +1,219 @@
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
annotations:
argocd.argoproj.io/tracking-id: test:argoproj.io/Workflow:argo/test
kubectl.kubernetes.io/last-applied-configuration: |
{"apiVersion":"argoproj.io/v1alpha1","kind":"Workflow","metadata":{"annotations":{"argocd.argoproj.io/tracking-id":"test:argoproj.io/Workflow:argo/test"},"labels":{"entity-id":"test","env":"dev"},"name":"test","namespace":"argo"},"spec":{"action":"create","entrypoint":"main","serviceAccountName":"admin","templates":[{"name":"main","steps":[[{"name":"spark-job","template":"spark-job"}],[{"arguments":{"parameters":[{"name":"spark-job-name","value":"{{steps.spark-job.outputs.parameters.spark-job-name}}"}]},"name":"wait","template":"wait"}]]},{"inputs":{"parameters":[{"name":"spark-job-name"}]},"name":"wait","resource":{"action":"get","failureCondition":"status.applicationState.state == FAILED","manifest":"apiVersion: \"sparkoperator.k8s.io/v1beta2\"\nkind: SparkApplication\nmetadata:\n name: {{inputs.parameters.spark-job-name}}\n namespace: argo\n","successCondition":"status.applicationState.state == COMPLETED"}},{"name":"spark-job","outputs":{"parameters":[{"name":"spark-job-name","valueFrom":{"jsonPath":"{.metadata.name}"}}]},"resource":{"action":"create","manifest":"apiVersion: \"sparkoperator.k8s.io/v1beta2\"\nkind: SparkApplication\nmetadata:\n name: spark-pi-test\n namespace: argo\n labels:\n env: dev\n entity-id: test\nspec:\n type: Scala\n mode: cluster\n image: \"docker.io/apache/spark:v3.1.3\"\n imagePullPolicy: IfNotPresent\n mainClass: org.apache.spark.examples.SparkPi\n mainApplicationFile: \"local:///opt/spark/examples/jars/spark-examples_2.12-3.1.3.jar\"\n sparkVersion: \"3.1.1\"\n restartPolicy:\n type: Never\n volumes:\n - name: \"test-volume\"\n hostPath:\n path: \"/tmp\"\n type: Directory\n driver:\n cores: 1\n coreLimit: \"1200m\"\n memory: \"512m\"\n labels:\n version: 3.1.1\n serviceAccount: admin\n volumeMounts:\n - name: \"test-volume\"\n mountPath: \"/tmp\"\n executor:\n cores: 1\n instances: 1\n memory: \"512m\"\n labels:\n version: 3.1.1\n volumeMounts:\n - name: \"test-volume\"\n mountPath: \"/tmp\"\n","setOwnerReference":true}}]}}
workflows.argoproj.io/pod-name-format: v2
creationTimestamp: "2024-10-16T10:24:01Z"
generation: 17
labels:
entity-id: test
env: dev
workflows.argoproj.io/completed: "false"
workflows.argoproj.io/phase: Succeeded
name: test
namespace: argo
resourceVersion: "5041"
uid: 41ef434b-6002-4ccc-be25-424d8de6e69d
spec:
action: create
arguments: {}
entrypoint: main
serviceAccountName: admin
templates:
- name: main
steps:
- - name: spark-job
template: spark-job
- - arguments:
parameters:
- name: spark-job-name
value: '{{steps.spark-job.outputs.parameters.spark-job-name}}'
name: wait
template: wait
- inputs:
parameters:
- name: spark-job-name
name: wait
resource:
action: get
failureCondition: status.applicationState.state == FAILED
manifest: |
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
name: {{inputs.parameters.spark-job-name}}
namespace: argo
successCondition: status.applicationState.state == COMPLETED
- name: spark-job
outputs:
parameters:
- name: spark-job-name
valueFrom:
jsonPath: '{.metadata.name}'
resource:
action: create
manifest: |
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
name: spark-pi-test
namespace: argo
labels:
env: dev
entity-id: test
spec:
type: Scala
mode: cluster
image: "docker.io/apache/spark:v3.1.3"
imagePullPolicy: IfNotPresent
mainClass: org.apache.spark.examples.SparkPi
mainApplicationFile: "local:///opt/spark/examples/jars/spark-examples_2.12-3.1.3.jar"
sparkVersion: "3.1.1"
restartPolicy:
type: Never
volumes:
- name: "test-volume"
hostPath:
path: "/tmp"
type: Directory
driver:
cores: 1
coreLimit: "1200m"
memory: "512m"
labels:
version: 3.1.1
serviceAccount: admin
volumeMounts:
- name: "test-volume"
mountPath: "/tmp"
executor:
cores: 1
instances: 1
memory: "512m"
labels:
version: 3.1.1
volumeMounts:
- name: "test-volume"
mountPath: "/tmp"
setOwnerReference: true
status:
artifactGCStatus:
notSpecified: true
artifactRepositoryRef:
artifactRepository: {}
default: true
conditions:
- status: "False"
type: PodRunning
finishedAt: null
nodes:
test:
children:
- test-4218752377
displayName: test
finishedAt: "2024-10-16T10:24:31Z"
id: test
name: test
outboundNodes:
- test-2776088435
phase: Succeeded
progress: 2/2
resourcesDuration:
cpu: 17
memory: 17
startedAt: "2024-10-16T10:24:01Z"
templateName: main
templateScope: local/test
type: Steps
test-930589316:
boundaryID: test
children:
- test-2776088435
displayName: '[1]'
finishedAt: "2024-10-16T10:24:31Z"
id: test-930589316
name: test[1]
nodeFlag: {}
phase: Succeeded
progress: 1/1
resourcesDuration:
cpu: 16
memory: 16
startedAt: "2024-10-16T10:24:11Z"
templateScope: local/test
type: StepGroup
test-1871935052:
boundaryID: test
children:
- test-930589316
displayName: spark-job
finishedAt: "2024-10-16T10:24:03Z"
hostNodeName: localdev-control-plane
id: test-1871935052
name: test[0].spark-job
outputs:
exitCode: "0"
parameters:
- name: spark-job-name
value: spark-pi-test
valueFrom:
jsonPath: '{.metadata.name}'
phase: Succeeded
progress: 1/1
resourcesDuration:
cpu: 1
memory: 1
startedAt: "2024-10-16T10:24:01Z"
templateName: spark-job
templateScope: local/test
type: Pod
test-2776088435:
boundaryID: test
displayName: wait
finishedAt: "2024-10-16T10:24:28Z"
hostNodeName: localdev-control-plane
id: test-2776088435
inputs:
parameters:
- name: spark-job-name
value: spark-pi-test
name: test[1].wait
outputs:
exitCode: "0"
phase: Succeeded
progress: 1/1
resourcesDuration:
cpu: 16
memory: 16
startedAt: "2024-10-16T10:24:11Z"
templateName: wait
templateScope: local/test
type: Pod
test-4218752377:
boundaryID: test
children:
- test-1871935052
displayName: '[0]'
finishedAt: "2024-10-16T10:24:11Z"
id: test-4218752377
name: test[0]
nodeFlag: {}
phase: Succeeded
progress: 2/2
resourcesDuration:
cpu: 17
memory: 17
startedAt: "2024-10-16T10:24:01Z"
templateScope: local/test
type: StepGroup
phase: Succeeded
progress: 2/2
resourcesDuration:
cpu: 17
memory: 17
startedAt: "2024-10-16T10:24:01Z"
taskResultsCompletionStatus:
test-1871935052: false

37
fib.go Normal file

@@ -0,0 +1,37 @@
package fib

import "math/big"

// Fibonacci calculates the n-th Fibonacci number.
// It returns correct values for sequence numbers 0 through 93;
// for larger values, which overflow uint64, use FibonacciBig.
func Fibonacci(n uint) uint64 {
    if n <= 1 {
        return uint64(n)
    }
    var n2, n1 uint64 = 0, 1
    // Advance the pair (n2, n1) = (F(i-1), F(i)) until i reaches n-1.
    for i := uint(2); i < n; i++ {
        n2, n1 = n1, n1+n2
    }
    return n2 + n1
}

// FibonacciBig calculates the n-th Fibonacci number using big.Int.
// For sequence numbers below 94, the Fibonacci function is recommended
// as it is more efficient.
func FibonacciBig(n uint) *big.Int {
    if n <= 1 {
        return big.NewInt(int64(n))
    }
    var n2, n1 = big.NewInt(0), big.NewInt(1)
    // n2 is advanced in place to the next Fibonacci number; the swap
    // then keeps n1 pointing at the latest value.
    for i := uint(1); i < n; i++ {
        n2.Add(n2, n1)
        n1, n2 = n2, n1
    }
    return n1
}
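
Because `Fibonacci` wraps around silently once the result no longer fits in a `uint64`, callers may want to validate the range up front. The helper below is only an illustrative sketch meant to sit alongside `fib.go`; the name `FibonacciChecked` is hypothetical and not part of this commit:

```go
package fib

import "fmt"

// FibonacciChecked is a hypothetical wrapper (not part of this commit) that
// rejects sequence numbers whose result would overflow uint64 instead of
// silently wrapping around.
func FibonacciChecked(n uint) (uint64, error) {
    if n > 93 {
        return 0, fmt.Errorf("fib: n=%d out of uint64 range, use FibonacciBig", n)
    }
    return Fibonacci(n), nil
}
```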

57
fib_test.go Normal file

@@ -0,0 +1,57 @@
package fib

import "testing"

func BenchmarkFibonacci_10(b *testing.B) {
    for i := 0; i < b.N; i++ {
        Fibonacci(10)
    }
}

func BenchmarkFibonacci_20(b *testing.B) {
    for i := 0; i < b.N; i++ {
        Fibonacci(20)
    }
}

func BenchmarkFibonacciBig_10(b *testing.B) {
    for i := 0; i < b.N; i++ {
        FibonacciBig(10)
    }
}

func BenchmarkFibonacciBig_20(b *testing.B) {
    for i := 0; i < b.N; i++ {
        FibonacciBig(20)
    }
}

func TestFibonacci(t *testing.T) {
    data := []struct {
        n    uint
        want uint64
    }{
        {0, 0}, {1, 1}, {2, 1}, {3, 2}, {4, 3}, {5, 5}, {6, 8}, {10, 55}, {42, 267914296},
    }
    for _, d := range data {
        if got := Fibonacci(d.n); got != d.want {
            t.Errorf("Invalid Fibonacci value for N: %d, got: %d, want: %d", d.n, got, d.want)
        }
    }
}

func TestFibonacciBig(t *testing.T) {
    data := []struct {
        n    uint
        want int64
    }{
        {0, 0}, {1, 1}, {2, 1}, {3, 2}, {4, 3}, {5, 5}, {6, 8}, {10, 55}, {42, 267914296},
    }
    for _, d := range data {
        if got := FibonacciBig(d.n); got.Int64() != d.want {
            t.Errorf("Invalid Fibonacci value for N: %d, got: %d, want: %d", d.n, got, d.want)
        }
    }
}

56
forgejo_runner.yaml Normal file

@@ -0,0 +1,56 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: forgejo-runner
namespace: default # change this if you have a specific namespace
labels:
app: forgejo-runner
spec:
replicas: 1
selector:
matchLabels:
app: forgejo-runner
template:
metadata:
labels:
app: forgejo-runner
spec:
containers:
- name: forgejo-runner
image: docker.io/gitea/act_runner:latest # Use the latest version or specify a particular tag
env:
# Set the URL of your Forgejo server here
- name: RUNNER_FORGEJO_URL
value: "https://cnoe.localtest.me:8443/gitea/" # Replace with your Forgejo server URL
# Token used to authenticate the runner with the Forgejo server
- name: RUNNER_FORGEJO_TOKEN
value: "LzlJirWbzxcLByhFpl6JPK8PQylKEdMRt1jTvLj7" # Replace with your runner token
# Optional - Runner labels to organize or tag your runners
- name: RUNNER_LABELS
value: "self-hosted,linux,kubernetes"
# Optional - Runner name for identification on the Forgejo server
- name: RUNNER_NAME
value: "forgejo-runner-k8s"
# Set this to debug mode if you want more verbose logging
- name: RUNNER_LOG_LEVEL
value: "info" # or "debug"
resources:
requests:
cpu: "500m"
memory: "512Mi"
limits:
cpu: "1000m"
memory: "1Gi"
volumeMounts:
- name: runner-data
mountPath: /data # Store runner data here
volumes:
- name: runner-data
emptyDir: {} # Use emptyDir for ephemeral storage or configure persistent storage as needed

6
simple-job.yaml Normal file

@@ -0,0 +1,6 @@
on: [push]
jobs:
test:
runs-on: docker
steps:
- run: echo All Good