Compare commits: dev ... argo-workf
54 Commits

SHA1
e62cc94607
f9d73ec304
90460d78f2
31868e928b
8a75354fec
b449e20d0e
fc94078c80
944ee22cc7
d5dc7d91e6
a13187a87d
b66a5fd19f
80ce5e1d2d
fdf79aed50
bf8dc5045f
972026ede3
272c201aa4
7ed39517e7
3ec81ae920
358d5712cf
2c56cd6a2b
5155356aa0
c2af6c6ca8
49d7f8a500
e32403dd86
64605796b2
a652a0bddb
30ba18cace
e297121e93
9e4cc61e30
90dcd19a46
a5a9c9ec43
a0a9d7d592
7ada42d7f8
2777e73aa2
77884b291d
c9ffa1c420
c99b2be62f
239df0af11
a44bf142ba
3fb4ffd621
9659af1c9a
a77ec1a6f8
901805bd01
2886274d5e
8396169b19
8eb3dbfd59
dd248dc0b9
c8cd08d7ff
0c02c20995
7b12088952
7a411a7148
37ea900325
2a80c733b3
3764970082
argo-workflows/argo-ingress.yaml (new file, 20 lines)
@@ -0,0 +1,20 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: argo-ingress
  namespace: argo
  annotations:
    nginx.ingress.kubernetes.io/ssl-redirect: "false"
spec:
  ingressClassName: nginx
  rules:
    - host: argo.marcin00.pl
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: argo-server
                port:
                  number: 2746
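A quick way to apply and sanity-check this Ingress, assuming kubectl access to the cluster and DNS for argo.marcin00.pl pointing at the NGINX ingress controller (a sketch, not part of the change itself):

# apply the manifest and confirm the Ingress got an address
kubectl apply -f argo-workflows/argo-ingress.yaml
kubectl -n argo get ingress argo-ingress
# ssl-redirect is disabled, so plain HTTP should answer as well
curl -I http://argo.marcin00.pl/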
argo-workflows/argo-workflow-manager-role.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: argo-workflow-manager
  namespace: argo-events
rules:
  - apiGroups: ["argoproj.io"]
    resources: ["workflows", "workflowtemplates", "cronworkflows"]
    verbs: ["get", "list", "watch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: argo-ui-user-read-access
  namespace: argo-events
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: argo-workflow-manager
subjects:
  - kind: ServiceAccount
    name: argo-ui-user
    namespace: argo
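The binding grants the argo-ui-user service account (from the argo namespace) read-only access to workflows in argo-events. Impersonation makes that easy to verify; a minimal sketch:

# should print "yes" for read verbs and "no" for writes
kubectl auth can-i list workflows.argoproj.io -n argo-events \
  --as=system:serviceaccount:argo:argo-ui-user
kubectl auth can-i delete workflows.argoproj.io -n argo-events \
  --as=system:serviceaccount:argo:argo-ui-user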
argo-workflows/deploy-sensor.yaml (new file, 109 lines)
@@ -0,0 +1,109 @@
apiVersion: argoproj.io/v1alpha1
kind: Sensor
metadata:
  name: webhook-deploy
  namespace: argo-events
spec:
  template:
    serviceAccountName: operate-workflow-sa
  dependencies:
    - name: gitea-push
      eventSourceName: webhook
      eventName: user-microservice-deploy
  triggers:
    - template:
        name: deploy-user-microservice
        k8s:
          operation: create
          source:
            resource:
              apiVersion: argoproj.io/v1alpha1
              kind: Workflow
              metadata:
                generateName: deploy-user-microservice-
              spec:
                entrypoint: main
                serviceAccountName: operate-workflow-sa
                volumeClaimTemplates:
                  - metadata:
                      name: workspace
                    spec:
                      accessModes: ["ReadWriteOnce"]
                      resources:
                        requests:
                          storage: 128Mi
                templates:
                  - name: main
                    steps:
                      - - name: checkout
                          template: checkout
                      - - name: deploy
                          template: deploy

                  - name: checkout
                    container:
                      image: alpine/git
                      command: [sh, -c]
                      workingDir: /workspace
                      env:
                        - name: REPO_URL
                          value: https://gitea.marcin00.pl/pikram/user-microservice-deploy.git
                        - name: REPO_BRANCH
                          value: argo-deploy
                      args:
                        - |
                          git clone --depth 1 --branch "${REPO_BRANCH}" --single-branch "${REPO_URL}" repo
                      volumeMounts:
                        - name: workspace
                          mountPath: /workspace

                  - name: deploy
                    container:
                      image: marcin00.azurecr.io/azure-cli-kubectl:latest
                      command: [sh, -c]
                      workingDir: /workspace/repo
                      env:
                        - name: CLIENT_ID
                          value: "c302726f-fafb-4143-94c1-67a70975574a"
                        - name: CLUSTER_NAME
                          value: "build"
                        - name: RESOURCE_GROUP
                          value: "tst-aks-rg"
                        - name: DEPLOY_FILES
                          value: "namespace.yaml secret-store.yaml deploy.yaml ingress.yaml"
                        - name: DEPLOYMENT
                          value: "api"
                        - name: NAMESPACE
                          value: "user-microservice"
                        - name: HEALTHCHECK_URL
                          value: "https://user-microservice.marcin00.pl/health"
                      args:
                        - |
                          echo "===> Logging in to Azure"
                          az login --identity --client-id $CLIENT_ID
                          az aks get-credentials --resource-group $RESOURCE_GROUP --name $CLUSTER_NAME --overwrite-existing
                          kubelogin convert-kubeconfig -l azurecli

                          echo "===> Applying Kubernetes manifests"
                          for file in $DEPLOY_FILES; do
                            kubectl apply -f "$file"
                          done

                          echo "===> Waiting for deployment to complete"
                          kubectl rollout status deployment/$DEPLOYMENT -n $NAMESPACE --timeout=60s

                          echo "===> Running health check"
                          for i in $(seq 1 120); do
                            if curl -sf $HEALTHCHECK_URL; then
                              echo "Health check OK"
                              exit 0
                            else
                              echo "Health check failed. Retry $i..."
                              sleep 5
                            fi
                          done
                          echo "Health check failed"
                          exit 1
                      volumeMounts:
                        - name: workspace
                          mountPath: /workspace
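Once this Sensor is applied, each matching webhook event creates a Workflow named deploy-user-microservice-<suffix> in argo-events. A minimal way to watch a run and its result, assuming the Argo Workflows CRDs and controller are already installed (the argo CLI line is optional):

kubectl -n argo-events get workflows --watch
# with the argo CLI installed, follow the most recent workflow's logs
argo -n argo-events logs @latest --follow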
argo-workflows/eventbus-default.yaml (new file, 13 lines)
@@ -0,0 +1,13 @@
apiVersion: argoproj.io/v1alpha1
kind: EventBus
metadata:
  name: default
  namespace: argo-events
spec:
  nats:
    native:
      # Optional, defaults to 3.
      # If it is < 3, set it to 3, that is the minimal requirement.
      replicas: 3
      # Optional, auth strategy, "none" or "token", defaults to "none"
      auth: token
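The native NATS bus carries events between the EventSource and the Sensors. A quick health check after applying it (the pod naming shown in the comment is the controller's usual convention and may differ):

kubectl -n argo-events get eventbus default
# the NATS pods normally carry the bus name, e.g. eventbus-default-stan-0..2
kubectl -n argo-events get pods | grep eventbus-default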
argo-workflows/permissions.yaml (new file, 38 lines)
@@ -0,0 +1,38 @@
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: operate-workflow-sa
  namespace: argo-events

---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: operate-workflow-role
  namespace: argo-events
rules:
  - apiGroups: [ "argoproj.io" ]
    resources: [ "workflows" ]
    verbs: [ "*" ]
  - apiGroups: [ "argoproj.io" ]
    resources: [ "workflowtaskresults" ]
    verbs: [ "create", "patch" ]
  - apiGroups: [ "" ]
    resources: [ "pods" ]
    verbs: [ "get", "patch" ]

---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: operate-workflow-role-binding
  namespace: argo-events
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: operate-workflow-role
subjects:
  - kind: ServiceAccount
    name: operate-workflow-sa
    namespace: argo-events
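operate-workflow-sa is the identity both Sensors submit and run Workflows under, so it needs write access to workflows plus the task-result and pod permissions the workflow executor uses. Verifying with impersonation, as a sketch:

kubectl auth can-i create workflows.argoproj.io -n argo-events \
  --as=system:serviceaccount:argo-events:operate-workflow-sa
kubectl auth can-i create workflowtaskresults.argoproj.io -n argo-events \
  --as=system:serviceaccount:argo-events:operate-workflow-sa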
argo-workflows/secret-store.yaml (new file, 30 lines)
@@ -0,0 +1,30 @@
apiVersion: secrets-store.csi.x-k8s.io/v1
kind: SecretProviderClass
metadata:
  name: azure-keyvault
  namespace: argo-events
spec:
  provider: azure
  secretObjects:
    - secretName: gitea-secrets
      type: Opaque
      data:
        - objectName: gitea-known-host
          key: GITEA_KNOWN_HOST
        - objectName: gitea-deploy-key
          key: GITEA_DEPLOY_KEY
  parameters:
    usePodIdentity: "false"
    useVMManagedIdentity: "true"
    userAssignedIdentityID: "f91aef65-7d2a-4df8-a884-e33b05d54a31" # client_id of the user-assigned managed identity
    clientID: "f91aef65-7d2a-4df8-a884-e33b05d54a31" # client_id of the user-assigned managed identity
    keyvaultName: "dev-aks"
    objects: |
      array:
        - |
          objectName: gitea-known-host
          objectType: secret
        - |
          objectName: gitea-deploy-key
          objectType: secret
    tenantID: "f4e3e6f7-d21c-460e-b201-2192174e7f41"
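The SecretProviderClass only materializes the gitea-secrets Secret while a pod actually mounts the CSI volume (the build workflow's secrets-store volume does this). Two quick checks, assuming the logged-in Azure identity can read the dev-aks vault:

# confirm the source objects exist in Key Vault
az keyvault secret show --vault-name dev-aks --name gitea-deploy-key --query name
# while a workflow pod with the secrets-store volume is running:
kubectl -n argo-events get secret gitea-secrets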
argo-workflows/sensor.yaml (new file, 172 lines)
@@ -0,0 +1,172 @@
apiVersion: argoproj.io/v1alpha1
kind: Sensor
metadata:
  name: webhook-build
  namespace: argo-events
spec:
  template:
    serviceAccountName: operate-workflow-sa
  dependencies:
    - name: gitea-push
      eventSourceName: webhook
      eventName: user-microservice
  triggers:
    - template:
        name: trigger-build-workflow
        k8s:
          group: argoproj.io
          version: v1alpha1
          resource: workflows
          operation: create
          source:
            resource:
              apiVersion: argoproj.io/v1alpha1
              kind: Workflow
              metadata:
                generateName: build-workflow-
                namespace: argo-events
              spec:
                entrypoint: main
                serviceAccountName: operate-workflow-sa
                volumeClaimTemplates:
                  - metadata:
                      name: workspace
                    spec:
                      accessModes: ["ReadWriteOnce"]
                      resources:
                        requests:
                          storage: 128Mi
                volumes:
                  - name: secrets-store
                    csi:
                      driver: secrets-store.csi.k8s.io
                      readOnly: true
                      volumeAttributes:
                        secretProviderClass: azure-keyvault
                templates:
                  - name: main
                    steps:
                      - - name: checkout
                          template: checkout
                      - - name: tests
                          template: tests
                      - - name: build-and-push-image
                          template: build-and-push-image
                          arguments:
                            parameters:
                              - name: git-sha
                                value: "{{steps.checkout.outputs.parameters.git-sha}}"
                      - - name: gitops-commit
                          template: gitops-commit
                          arguments:
                            parameters:
                              - name: git-sha
                                value: "{{steps.checkout.outputs.parameters.git-sha}}"
                  - name: checkout
                    container:
                      image: alpine/git
                      command: [sh, -c]
                      workingDir: /workspace
                      env:
                        - name: REPO_URL
                          value: https://gitea.marcin00.pl/pikram/user-microservice.git
                        - name: REPO_BRANCH
                          value: argo-workflows
                      args:
                        - |
                          git clone --depth 1 --branch "${REPO_BRANCH}" --single-branch "${REPO_URL}" repo
                          cd repo
                          git rev-parse HEAD > /tmp/gitsha.txt
                      volumeMounts:
                        - name: workspace
                          mountPath: /workspace
                    outputs:
                      parameters:
                        - name: git-sha
                          valueFrom:
                            path: /tmp/gitsha.txt
                  - name: tests
                    script:
                      image: python:3.11.7-alpine
                      command: [sh]
                      workingDir: /workspace/repo/api
                      source: |
                        python3 -m venv env
                        source env/bin/activate
                        pip install -r requirements.txt pytest
                        python3 -m pytest --junit-xml=pytest_junit.xml
                      volumeMounts:
                        - name: workspace
                          mountPath: /workspace
                  - name: build-and-push-image
                    inputs:
                      parameters:
                        - name: git-sha
                    podSpecPatch: |
                      runtimeClassName: sysbox-runc
                    metadata:
                      annotations:
                        io.kubernetes.cri-o.userns-mode: "auto:size=65536"
                    container:
                      image: marcin00.azurecr.io/azure-cli-docker:slim-bookworm
                      command: [sh, -c]
                      workingDir: /workspace/repo
                      env:
                        - name: DOCKER_IMAGE
                          value: marcin00.azurecr.io/user-microservice:{{inputs.parameters.git-sha}}
                        - name: CLIENT_ID
                          value: c302726f-fafb-4143-94c1-67a70975574a
                        - name: ACR_NAME
                          value: marcin00
                      args:
                        - |
                          dockerd &
                          docker build -t $DOCKER_IMAGE --build-arg APP_VERSION={{inputs.parameters.git-sha}} --build-arg BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") .
                          az login --identity --client-id ${CLIENT_ID}
                          az acr login --name ${ACR_NAME}
                          docker push ${DOCKER_IMAGE}
                      volumeMounts:
                        - name: workspace
                          mountPath: /workspace
                  - name: gitops-commit
                    inputs:
                      parameters:
                        - name: git-sha
                    container:
                      image: alpine/git
                      command: [sh, -c]
                      env:
                        - name: DEPLOY_REPO_URL
                          value: ssh://git@srv22.mikr.us:20343/pikram/user-microservice-deploy.git
                        - name: DEPLOY_REPO_BRANCH
                          value: argo-deploy
                        - name: CI_COMMIT_SHA
                          value: "{{inputs.parameters.git-sha}}"
                      args:
                        - |
                          mkdir -p ~/.ssh
                          cp /mnt/secrets/gitea-known-host ~/.ssh/known_hosts
                          chmod 644 ~/.ssh/known_hosts
                          cp /mnt/secrets/gitea-deploy-key ~/.ssh/id_ed25519
                          chmod 600 ~/.ssh/id_ed25519
                          git config --global user.name "argo[bot]"
                          git config --global user.email "argo@marcin00.pl"
                          git clone --depth 1 --branch $DEPLOY_REPO_BRANCH --single-branch $DEPLOY_REPO_URL repo
                          cd repo
                          awk -v commit="$CI_COMMIT_SHA" '
                            $0 ~ /name:[[:space:]]*api/ { in_api_container = 1; print; next }
                            in_api_container && $0 ~ /^[[:space:]]*image:[[:space:]]*/ {
                              sub(/:[^:[:space:]]+$/, ":" commit)
                              in_api_container = 0
                              print
                              next
                            }
                            { print }
                          ' deploy.yaml > deploy.tmp && mv deploy.tmp deploy.yaml
                          git add deploy.yaml
                          git diff-index --quiet HEAD || git commit -m "Argo: Changed deployed version to $CI_COMMIT_SHA"
                          git push origin $DEPLOY_REPO_BRANCH
                      volumeMounts:
                        - name: secrets-store
                          mountPath: "/mnt/secrets"
                          readOnly: true
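The gitops-commit step rewrites deploy.yaml in the deploy repo with awk: it bumps only the image tag of the container named api and leaves every other image untouched. A local dry run of the same program against a hypothetical two-container snippet (file path and sample contents invented for illustration):

cat > /tmp/deploy-sample.yaml <<'EOF'
        - name: api
          image: marcin00.azurecr.io/user-microservice:oldsha
        - name: sidecar
          image: nginx:1.27
EOF
awk -v commit="newsha123" '
  $0 ~ /name:[[:space:]]*api/ { in_api_container = 1; print; next }
  in_api_container && $0 ~ /^[[:space:]]*image:[[:space:]]*/ {
    sub(/:[^:[:space:]]+$/, ":" commit)
    in_api_container = 0
    print
    next
  }
  { print }
' /tmp/deploy-sample.yaml
# only the user-microservice image line should now end in :newsha123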
argo-workflows/source.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
apiVersion: argoproj.io/v1alpha1
kind: EventSource
metadata:
  name: webhook
  namespace: argo-events
spec:
  service:
    ports:
      - port: 12000
        targetPort: 12000
  webhook:
    user-microservice:
      endpoint: /user-microservice
      method: POST
      port: "12000"
    user-microservice-deploy:
      endpoint: /user-microservice-deploy
      method: POST
      port: "12000"
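Each key under webhook: becomes an HTTP endpoint served by the EventSource pod on port 12000. Before wiring up Gitea, the endpoints can be exercised from a workstation via a port-forward; a sketch (the JSON body is an arbitrary placeholder, since the Sensors shown here do not filter on payload):

kubectl -n argo-events port-forward svc/webhook-eventsource-svc 12000:12000 &
sleep 2
curl -s -X POST -H 'Content-Type: application/json' \
  -d '{"ref":"refs/heads/argo-workflows"}' \
  http://localhost:12000/user-microservice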
argo-workflows/webhook-ingress.yaml (new file, 27 lines)
@@ -0,0 +1,27 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: argo-ingress
  namespace: argo-events
  annotations:
    nginx.ingress.kubernetes.io/ssl-redirect: "false"
spec:
  ingressClassName: nginx
  rules:
    - host: argo-hook.marcin00.pl
      http:
        paths:
          - path: /user-microservice
            pathType: Prefix
            backend:
              service:
                name: webhook-eventsource-svc
                port:
                  number: 12000
          - path: /user-microservice-deploy
            pathType: Prefix
            backend:
              service:
                name: webhook-eventsource-svc
                port:
                  number: 12000
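With this Ingress in place, Gitea's push webhooks would point at the public host, e.g. https://argo-hook.marcin00.pl/user-microservice for the build pipeline and /user-microservice-deploy for the deploy pipeline. A quick end-to-end probe through the ingress (an accepted event should come back with an HTTP 2xx status):

curl -s -o /dev/null -w '%{http_code}\n' -X POST \
  -d '{}' https://argo-hook.marcin00.pl/user-microservice-deploy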
argo-workflows/webhook-service.yaml (new file, 16 lines)
@@ -0,0 +1,16 @@
apiVersion: v1
kind: Service
metadata:
  name: webhook-eventsource-svc
  namespace: argo-events
spec:
  type: ClusterIP
  ports:
    - name: default
      port: 12000
      protocol: TCP
      targetPort: 12000
  selector:
    controller: eventsource-controller
    eventsource-name: webhook
    owner-name: webhook
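The selector matches the labels Argo Events puts on the generated EventSource pod (the same eventsource-name and owner-name values), so this Service should front exactly the webhook EventSource. Quick check that it actually resolves to a pod:

kubectl -n argo-events get pods -l eventsource-name=webhook
kubectl -n argo-events get endpoints webhook-eventsource-svc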
@@ -1,67 +0,0 @@
import matplotlib.pyplot as plt  # type: ignore
import os
import statistics

# Data
data = {
    'Jenkins + Jenkins': (165, 158, 217, 164, 136, 135, 147, 145, 138, 134, 137, 129, 136, 142, 125, 138, 133, 136, 128, 131),
    'Jenkins + ArgoCD': (181, 111, 115, 121, 128, 105, 108, 119, 112, 109, 110, 108, 111, 106, 113, 117, 113, 120, 113, 107),
    'Jenkins + FluxCD': (167, 119, 113, 110, 102, 126, 111, 113, 118, 106, 111, 104, 101, 105, 104, 106, 102, 105, 107, 103),
    'Woodpecker + Woodpecker': (340, 348, 334, 363, 350, 339, 331, 354, 357, 351, 356, 347, 354, 341, 357, 352, 368, 336, 331, 340),
    'Woodpecker + ArgoCD': (355, 360, 354, 344, 318, 353, 328, 305, 331, 324, 328, 349, 337, 328, 349, 350, 344, 344, 344, 341),
    'Woodpecker + FluxCD': (326, 344, 325, 337, 343, 358, 339, 341, 335, 354, 342, 355, 345, 334, 356, 346, 338, 342, 330, 333),
    'Argo Workflows + Argo-Workflows': (190, 190, 169, 211, 172, 198, 207, 192, 212, 181, 168, 199, 216, 213, 220, 209, 192, 210, 196, 165),
    'Argo Workflows + ArgoCD': (145, 159, 163, 148, 169, 185, 153, 148, 139, 176, 133, 140, 161, 135, 161, 130, 139, 164, 183, 183),
    'Argo Workflows + FluxCD': (161, 136, 181, 157, 141, 139, 157, 149, 151, 139, 139, 148, 152, 142, 136, 149, 160, 145, 173, 161)
}

# Compute the means
labels = list(data.keys())
means = [statistics.mean(data[k]) for k in labels]

# Index groups to compare
groupings = [
    [0, 1, 2],
    [3, 4, 5],
    [6, 7, 8],
    [0, 3, 6],
    [1, 4, 7],
    [2, 5, 8]
]

# Colors from the 'tab10' palette
color_palette = plt.get_cmap('tab10')

# Output folder
output_folder = "plots"
os.makedirs(output_folder, exist_ok=True)

# Generate the plots
for i, group in enumerate(groupings):
    group_labels = [labels[j] for j in group]
    group_means = [means[j] for j in group]
    colors = [color_palette(j % 10) for j in group]  # distinct colors

    plt.figure()
    bars = plt.bar(group_labels, group_means, color=colors)

    # Find the maximum value so the Y axis can be extended
    max_val = max(group_means)
    plt.ylim(0, max_val * 1.15)  # add 15% headroom for the labels

    plt.ylabel("Average deployment time (s)")
    plt.title("Comparison of average deployment times")
    plt.xticks(rotation=45)

    # Put the value above each bar
    for bar in bars:
        yval = bar.get_height()
        label = f'{yval:.1f}'.replace('.', ',')  # swap the decimal point for a comma
        plt.text(bar.get_x() + bar.get_width()/2.0, yval + max_val * 0.02, label,
                 ha='center', va='bottom', fontsize=9)

    plt.tight_layout()
    plt.savefig(f"{output_folder}/mean_times_{i}.png")
    plt.close()

print("All plots generated with extra margins!")
@@ -36,19 +36,30 @@ fi
 echo "[INFO] Waiting for the new version to be deployed..."
 
 # === Poll the /version endpoint ===
+WAITED=0
+echo "[WAIT] Waiting for the new version..."
+
 while true; do
     sleep $CHECK_INTERVAL
+    WAITED=$((WAITED + CHECK_INTERVAL))
 
     NEW_VERSION=$(curl -s "$APP_URL" | jq -r '.version')
 
     if [[ "$NEW_VERSION" != "$OLD_VERSION" ]]; then
         END_TIME=$(date +%s)
         DURATION=$((END_TIME - START_TIME))
-        echo "[INFO] New version deployed: $NEW_VERSION"
+        # Overwrite the counter line
+        printf "\r[INFO] New version deployed after %d healthcheck attempts: %s\n" "$WAITED" "$NEW_VERSION"
         echo "[INFO] Deployment time: $DURATION seconds"
 
         echo "$START_TIME,$END_TIME,$DURATION,$OLD_VERSION,$NEW_VERSION" >> "$OUTPUT_FILE"
         break
     else
-        echo "[WAIT] Waiting... ($NEW_VERSION)"
+        # Overwrite ONLY the counter line
+        printf "\r[WAIT] Waiting... %d healthcheck attempts so far" "$WAITED"
     fi
 done
+
+# Move the cursor to a new line once the loop is done
+echo ""
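The printf "\r..." lines deliberately omit a trailing newline, so each poll overwrites the same terminal line instead of scrolling, and the final echo "" drops the cursor onto a fresh line. A tiny standalone demo of the same trick:

for i in 1 2 3; do
    printf "\r[WAIT] Waiting... %d healthcheck attempts so far" "$i"
    sleep 1
done
echo ""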
version_marker.txt (new file, 1 line)
@@ -0,0 +1 @@
1754148549
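The marker value looks like a Unix epoch timestamp (seconds since 1970-01-01 UTC); if so, it can be decoded on a GNU system with:

date -u -d @1754148549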