Compare commits
26 Commits
dev
...
9e4cc61e30
Author | SHA1 | Date | |
---|---|---|---|
9e4cc61e30 | |||
90dcd19a46 | |||
a5a9c9ec43 | |||
a0a9d7d592 | |||
7ada42d7f8 | |||
2777e73aa2 | |||
77884b291d | |||
c9ffa1c420 | |||
c99b2be62f | |||
239df0af11 | |||
a44bf142ba | |||
3fb4ffd621 | |||
9659af1c9a | |||
a77ec1a6f8 | |||
901805bd01 | |||
2886274d5e | |||
8396169b19 | |||
8eb3dbfd59 | |||
dd248dc0b9 | |||
c8cd08d7ff | |||
0c02c20995 | |||
7b12088952 | |||
7a411a7148 | |||
37ea900325 | |||
2a80c733b3 | |||
3764970082 |
15
Dockerfile
15
Dockerfile
@ -1,18 +1,5 @@
|
|||||||
FROM python:3.11.7-alpine
|
FROM python:3.11.7-slim-bookworm
|
||||||
|
|
||||||
# Wersja i data builda jako build-arg
|
|
||||||
ARG APP_VERSION=unknown
|
|
||||||
ARG BUILD_DATE=unknown
|
|
||||||
|
|
||||||
# Ustawiamy zmienne w ENV, by były dostępne w kontenerze
|
|
||||||
ENV APP_VERSION=$APP_VERSION
|
|
||||||
ENV BUILD_DATE=$BUILD_DATE
|
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY api .
|
COPY api .
|
||||||
|
|
||||||
RUN apk add --no-cache curl
|
|
||||||
RUN pip install -r requirements.txt
|
RUN pip install -r requirements.txt
|
||||||
|
|
||||||
CMD python3 app.py
|
CMD python3 app.py
|
||||||
|
@ -4,8 +4,7 @@ from flask_jwt_extended import JWTManager
|
|||||||
from jwt import ExpiredSignatureError
|
from jwt import ExpiredSignatureError
|
||||||
from models import db, RevokedToken
|
from models import db, RevokedToken
|
||||||
import os
|
import os
|
||||||
from tech_views import tech_bp
|
from utils import init_db
|
||||||
from utils import init_db, wait_for_db
|
|
||||||
from views import user_bp
|
from views import user_bp
|
||||||
from werkzeug.exceptions import HTTPException
|
from werkzeug.exceptions import HTTPException
|
||||||
|
|
||||||
@ -27,7 +26,6 @@ def create_app(config_name="default"):
|
|||||||
|
|
||||||
# Blueprints registration
|
# Blueprints registration
|
||||||
app.register_blueprint(user_bp)
|
app.register_blueprint(user_bp)
|
||||||
app.register_blueprint(tech_bp)
|
|
||||||
|
|
||||||
# Database and JWT initialization
|
# Database and JWT initialization
|
||||||
db.init_app(app)
|
db.init_app(app)
|
||||||
@ -55,7 +53,6 @@ def create_app(config_name="default"):
|
|||||||
|
|
||||||
# Fill database by initial values (only if we are not testing)
|
# Fill database by initial values (only if we are not testing)
|
||||||
with app.app_context():
|
with app.app_context():
|
||||||
wait_for_db(max_retries=100)
|
|
||||||
db.create_all()
|
db.create_all()
|
||||||
if config_name != "testing":
|
if config_name != "testing":
|
||||||
init_db()
|
init_db()
|
||||||
|
@ -1,20 +0,0 @@
|
|||||||
from flask import Blueprint, jsonify
|
|
||||||
from models import db
|
|
||||||
from sqlalchemy import text
|
|
||||||
from utils import db_ready
|
|
||||||
|
|
||||||
# Blueprint with technical endpoints
|
|
||||||
tech_bp = Blueprint('tech_bp', __name__)
|
|
||||||
|
|
||||||
@tech_bp.route('/health', methods=['GET'])
def health_check():
    """Report service and database health.

    Returns:
        200 ``{"status": "healthy"}``   – a trivial query succeeded.
        500 ``{"status": "unhealthy"}`` – the database was reachable before
                                          (``db_ready``) but is not now.
        503 ``{"status": "starting"}``  – the first connection has not yet
                                          been established (startup phase).
    """
    try:
        # Cheapest possible round-trip to prove the connection is alive.
        with db.engine.connect() as connection:
            connection.execute(text("SELECT 1"))
        return jsonify(status="healthy"), 200
    except Exception:
        # BUG FIX: re-import at call time. A module-level
        # `from utils import db_ready` binds the value at import time
        # (almost always False), so the 500 branch could never fire and
        # a crashed database would forever be reported as "starting".
        from utils import db_ready
        if db_ready:
            return jsonify(status="unhealthy"), 500
        else:
            return jsonify(status="starting"), 503
|
|
21
api/utils.py
21
api/utils.py
@ -2,22 +2,17 @@ from flask import abort
|
|||||||
from flask_jwt_extended import get_jwt_identity
|
from flask_jwt_extended import get_jwt_identity
|
||||||
from models import User, db
|
from models import User, db
|
||||||
import os
|
import os
|
||||||
from sqlalchemy import text
|
|
||||||
from sqlalchemy.exc import DatabaseError, InterfaceError
|
|
||||||
import time
|
|
||||||
from werkzeug.security import generate_password_hash
|
from werkzeug.security import generate_password_hash
|
||||||
|
|
||||||
db_ready = False
|
|
||||||
|
|
||||||
def admin_required(user_id, message='Access denied.'):
|
def admin_required(user_id, message='Access denied.'):
|
||||||
"Check if common user try to make administrative action."
|
|
||||||
user = db.session.get(User, user_id)
|
user = db.session.get(User, user_id)
|
||||||
if user is None or user.role != "Administrator":
|
if user is None or user.role != "Administrator":
|
||||||
abort(403, message)
|
abort(403, message)
|
||||||
|
|
||||||
|
|
||||||
def validate_access(owner_id, message='Access denied.'):
|
def validate_access(owner_id, message='Access denied.'):
|
||||||
"Check if user try to access or edit resource that does not belong to them."
|
# Check if user try to access or edit resource that does not belong to them
|
||||||
logged_user_id = int(get_jwt_identity())
|
logged_user_id = int(get_jwt_identity())
|
||||||
logged_user_role = db.session.get(User, logged_user_id).role
|
logged_user_role = db.session.get(User, logged_user_id).role
|
||||||
if logged_user_role != "Administrator" and logged_user_id != owner_id:
|
if logged_user_role != "Administrator" and logged_user_id != owner_id:
|
||||||
@ -32,20 +27,6 @@ def get_user_or_404(user_id):
|
|||||||
return user
|
return user
|
||||||
|
|
||||||
|
|
||||||
def wait_for_db(max_retries):
    """Try to connect to the database up to ``max_retries`` times.

    Issues a trivial ``SELECT 1`` per attempt, sleeping 3 seconds between
    failures. On the first success sets the module-level ``db_ready`` flag
    and returns.

    Args:
        max_retries: number of connection attempts before giving up.

    Raises:
        Exception: when every attempt failed.
    """
    global db_ready
    for _ in range(max_retries):
        try:
            with db.engine.connect() as connection:
                connection.execute(text("SELECT 1"))
            db_ready = True
            return
        # BUG FIX: the original `except DatabaseError | InterfaceError:`
        # evaluates the `|` to a types.UnionType (PEP 604), which the
        # except clause rejects at match time with "TypeError: catching
        # classes that do not inherit from BaseException is not allowed".
        # A tuple is the correct multi-exception form.
        except (DatabaseError, InterfaceError):
            time.sleep(3)
    raise Exception("Failed to connect to database.")
|
|
||||||
|
|
||||||
|
|
||||||
def init_db():
|
def init_db():
|
||||||
"""Create default admin account if database is empty"""
|
"""Create default admin account if database is empty"""
|
||||||
with db.session.begin():
|
with db.session.begin():
|
||||||
|
@ -2,7 +2,6 @@ from flask import Blueprint, jsonify, request, abort
|
|||||||
from flask_jwt_extended import create_access_token, set_access_cookies, jwt_required, \
|
from flask_jwt_extended import create_access_token, set_access_cookies, jwt_required, \
|
||||||
verify_jwt_in_request, get_jwt_identity, unset_jwt_cookies, get_jwt
|
verify_jwt_in_request, get_jwt_identity, unset_jwt_cookies, get_jwt
|
||||||
from models import db, RevokedToken, User
|
from models import db, RevokedToken, User
|
||||||
import os
|
|
||||||
from utils import admin_required, validate_access, get_user_or_404
|
from utils import admin_required, validate_access, get_user_or_404
|
||||||
from werkzeug.security import check_password_hash, generate_password_hash
|
from werkzeug.security import check_password_hash, generate_password_hash
|
||||||
|
|
||||||
@ -111,10 +110,3 @@ def user_logout():
|
|||||||
response = jsonify({"msg": "User logged out successfully."})
|
response = jsonify({"msg": "User logged out successfully."})
|
||||||
unset_jwt_cookies(response)
|
unset_jwt_cookies(response)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
@user_bp.route('/version', methods=['GET'])
def version():
    """Expose build metadata injected via environment variables.

    Both values default to the string "unknown" when the corresponding
    variable is not set (e.g. a locally built image without build args).
    """
    payload = {
        "version": os.getenv("APP_VERSION", "unknown"),
        "build_time": os.getenv("BUILD_DATE", "unknown"),
    }
    return jsonify(payload)
|
|
20
argo-workflows/argo-ingress.yaml
Normal file
20
argo-workflows/argo-ingress.yaml
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
apiVersion: networking.k8s.io/v1
|
||||||
|
kind: Ingress
|
||||||
|
metadata:
|
||||||
|
name: argo-ingress
|
||||||
|
namespace: argo
|
||||||
|
annotations:
|
||||||
|
nginx.ingress.kubernetes.io/ssl-redirect: "false"
|
||||||
|
spec:
|
||||||
|
ingressClassName: nginx
|
||||||
|
rules:
|
||||||
|
- host: argo.marcin00.pl
|
||||||
|
http:
|
||||||
|
paths:
|
||||||
|
- path: /
|
||||||
|
pathType: Prefix
|
||||||
|
backend:
|
||||||
|
service:
|
||||||
|
name: argo-server
|
||||||
|
port:
|
||||||
|
number: 2746
|
23
argo-workflows/argo-workflow-manager-role.yaml
Normal file
23
argo-workflows/argo-workflow-manager-role.yaml
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
apiVersion: rbac.authorization.k8s.io/v1
|
||||||
|
kind: Role
|
||||||
|
metadata:
|
||||||
|
name: argo-workflow-manager
|
||||||
|
namespace: argo-events
|
||||||
|
rules:
|
||||||
|
- apiGroups: ["argoproj.io"]
|
||||||
|
resources: ["workflows", "workflowtemplates", "cronworkflows"]
|
||||||
|
verbs: ["get", "list", "watch"]
|
||||||
|
---
|
||||||
|
apiVersion: rbac.authorization.k8s.io/v1
|
||||||
|
kind: RoleBinding
|
||||||
|
metadata:
|
||||||
|
name: argo-ui-user-read-access
|
||||||
|
namespace: argo-events
|
||||||
|
roleRef:
|
||||||
|
apiGroup: rbac.authorization.k8s.io
|
||||||
|
kind: Role
|
||||||
|
name: argo-workflow-manager
|
||||||
|
subjects:
|
||||||
|
- kind: ServiceAccount
|
||||||
|
name: argo-ui-user
|
||||||
|
namespace: argo
|
13
argo-workflows/eventbus-default.yaml
Normal file
13
argo-workflows/eventbus-default.yaml
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
apiVersion: argoproj.io/v1alpha1
|
||||||
|
kind: EventBus
|
||||||
|
metadata:
|
||||||
|
name: default
|
||||||
|
namespace: argo-events
|
||||||
|
spec:
|
||||||
|
nats:
|
||||||
|
native:
|
||||||
|
# Optional, defaults to 3.
|
||||||
|
# If it is < 3, set it to 3, that is the minimal requirement.
|
||||||
|
replicas: 3
|
||||||
|
# Optional, authen strategy, "none" or "token", defaults to "none"
|
||||||
|
auth: token
|
38
argo-workflows/permissions.yaml
Normal file
38
argo-workflows/permissions.yaml
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: ServiceAccount
|
||||||
|
metadata:
|
||||||
|
name: operate-workflow-sa
|
||||||
|
namespace: argo-events
|
||||||
|
|
||||||
|
---
|
||||||
|
apiVersion: rbac.authorization.k8s.io/v1
|
||||||
|
kind: Role
|
||||||
|
metadata:
|
||||||
|
name: operate-workflow-role
|
||||||
|
namespace: argo-events
|
||||||
|
rules:
|
||||||
|
- apiGroups: [ "argoproj.io" ]
|
||||||
|
resources: [ "workflows" ]
|
||||||
|
verbs: [ "*" ]
|
||||||
|
- apiGroups: [ "argoproj.io" ]
|
||||||
|
resources: [ "workflowtaskresults" ]
|
||||||
|
verbs: [ "create", "patch" ]
|
||||||
|
- apiGroups: [ "" ]
|
||||||
|
resources: [ "pods" ]
|
||||||
|
verbs: [ "get", "patch" ]
|
||||||
|
|
||||||
|
---
|
||||||
|
apiVersion: rbac.authorization.k8s.io/v1
|
||||||
|
kind: RoleBinding
|
||||||
|
metadata:
|
||||||
|
name: operate-workflow-role-binding
|
||||||
|
namespace: argo-events
|
||||||
|
roleRef:
|
||||||
|
apiGroup: rbac.authorization.k8s.io
|
||||||
|
kind: Role
|
||||||
|
name: operate-workflow-role
|
||||||
|
subjects:
|
||||||
|
- kind: ServiceAccount
|
||||||
|
name: operate-workflow-sa
|
||||||
|
namespace: argo-events
|
30
argo-workflows/secret-store.yaml
Normal file
30
argo-workflows/secret-store.yaml
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
apiVersion: secrets-store.csi.x-k8s.io/v1
|
||||||
|
kind: SecretProviderClass
|
||||||
|
metadata:
|
||||||
|
name: azure-keyvault
|
||||||
|
namespace: argo-events
|
||||||
|
spec:
|
||||||
|
provider: azure
|
||||||
|
secretObjects:
|
||||||
|
- secretName: gitea-secrets
|
||||||
|
type: Opaque
|
||||||
|
data:
|
||||||
|
- objectName: gitea-known-host
|
||||||
|
key: GITEA_KNOWN_HOST
|
||||||
|
- objectName: gitea-deploy-key
|
||||||
|
key: GITEA_DEPLOY_KEY
|
||||||
|
parameters:
|
||||||
|
usePodIdentity: "false"
|
||||||
|
useVMManagedIdentity: "true"
|
||||||
|
userAssignedIdentityID: "f91aef65-7d2a-4df8-a884-e33b05d54a31" # client_id of the user-assigned managed identity
|
||||||
|
clientID: "f91aef65-7d2a-4df8-a884-e33b05d54a31" # client_id of the user-assigned managed identity
|
||||||
|
keyvaultName: "dev-aks"
|
||||||
|
objects: |
|
||||||
|
array:
|
||||||
|
- |
|
||||||
|
objectName: gitea-known-host
|
||||||
|
objectType: secret
|
||||||
|
- |
|
||||||
|
objectName: gitea-deploy-key
|
||||||
|
objectType: secret
|
||||||
|
tenantID: "f4e3e6f7-d21c-460e-b201-2192174e7f41"
|
172
argo-workflows/sensor.yaml
Normal file
172
argo-workflows/sensor.yaml
Normal file
@ -0,0 +1,172 @@
|
|||||||
|
apiVersion: argoproj.io/v1alpha1
|
||||||
|
kind: Sensor
|
||||||
|
metadata:
|
||||||
|
name: webhook-build
|
||||||
|
namespace: argo-events
|
||||||
|
spec:
|
||||||
|
template:
|
||||||
|
serviceAccountName: operate-workflow-sa
|
||||||
|
dependencies:
|
||||||
|
- name: gitea-push
|
||||||
|
eventSourceName: webhook
|
||||||
|
eventName: test-hook
|
||||||
|
triggers:
|
||||||
|
- template:
|
||||||
|
name: trigger-build-workflow
|
||||||
|
k8s:
|
||||||
|
group: argoproj.io
|
||||||
|
version: v1alpha1
|
||||||
|
resource: workflows
|
||||||
|
operation: create
|
||||||
|
source:
|
||||||
|
resource:
|
||||||
|
apiVersion: argoproj.io/v1alpha1
|
||||||
|
kind: Workflow
|
||||||
|
metadata:
|
||||||
|
generateName: build-workflow-
|
||||||
|
namespace: argo-events
|
||||||
|
spec:
|
||||||
|
entrypoint: main
|
||||||
|
serviceAccountName: operate-workflow-sa
|
||||||
|
volumeClaimTemplates:
|
||||||
|
- metadata:
|
||||||
|
name: workspace
|
||||||
|
spec:
|
||||||
|
accessModes: ["ReadWriteOnce"]
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: 128Mi
|
||||||
|
volumes:
|
||||||
|
- name: secrets-store
|
||||||
|
csi:
|
||||||
|
driver: secrets-store.csi.k8s.io
|
||||||
|
readOnly: true
|
||||||
|
volumeAttributes:
|
||||||
|
secretProviderClass: azure-keyvault
|
||||||
|
templates:
|
||||||
|
- name: main
|
||||||
|
steps:
|
||||||
|
- - name: checkout
|
||||||
|
template: checkout
|
||||||
|
- - name: tests
|
||||||
|
template: tests
|
||||||
|
- - name: build-and-push-image
|
||||||
|
template: build-and-push-image
|
||||||
|
arguments:
|
||||||
|
parameters:
|
||||||
|
- name: git-sha
|
||||||
|
value: "{{steps.checkout.outputs.parameters.git-sha}}"
|
||||||
|
- - name: gitops-commit
|
||||||
|
template: gitops-commit
|
||||||
|
arguments:
|
||||||
|
parameters:
|
||||||
|
- name: git-sha
|
||||||
|
value: "{{steps.checkout.outputs.parameters.git-sha}}"
|
||||||
|
- name: checkout
|
||||||
|
container:
|
||||||
|
image: alpine/git
|
||||||
|
command: [sh, -c]
|
||||||
|
workingDir: /workspace
|
||||||
|
env:
|
||||||
|
- name: REPO_URL
|
||||||
|
value: https://gitea.marcin00.pl/pikram/user-microservice.git
|
||||||
|
- name: REPO_BRANCH
|
||||||
|
value: argo-workflows
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
git clone --depth 1 --branch "${REPO_BRANCH}" --single-branch "${REPO_URL}" repo
|
||||||
|
cd repo
|
||||||
|
git rev-parse HEAD > /tmp/gitsha.txt
|
||||||
|
volumeMounts:
|
||||||
|
- name: workspace
|
||||||
|
mountPath: /workspace
|
||||||
|
outputs:
|
||||||
|
parameters:
|
||||||
|
- name: git-sha
|
||||||
|
valueFrom:
|
||||||
|
path: /tmp/gitsha.txt
|
||||||
|
- name: tests
|
||||||
|
script:
|
||||||
|
image: python:3.11.7-alpine
|
||||||
|
command: [sh]
|
||||||
|
workingDir: /workspace/repo/api
|
||||||
|
source: |
|
||||||
|
python3 -m venv env
|
||||||
|
source env/bin/activate
|
||||||
|
pip install -r requirements.txt pytest
|
||||||
|
python3 -m pytest --junit-xml=pytest_junit.xml
|
||||||
|
volumeMounts:
|
||||||
|
- name: workspace
|
||||||
|
mountPath: /workspace
|
||||||
|
- name: build-and-push-image
|
||||||
|
inputs:
|
||||||
|
parameters:
|
||||||
|
- name: git-sha
|
||||||
|
podSpecPatch: |
|
||||||
|
runtimeClassName: sysbox-runc
|
||||||
|
metadata:
|
||||||
|
annotations:
|
||||||
|
io.kubernetes.cri-o.userns-mode: "auto:size=65536"
|
||||||
|
container:
|
||||||
|
image: marcin00.azurecr.io/azure-cli-docker:slim-bookworm
|
||||||
|
command: [sh, -c]
|
||||||
|
workingDir: /workspace/repo
|
||||||
|
env:
|
||||||
|
- name: DOCKER_IMAGE
|
||||||
|
value: marcin00.azurecr.io/user-microservice:{{inputs.parameters.git-sha}}
|
||||||
|
- name: CLIENT_ID
|
||||||
|
value: c302726f-fafb-4143-94c1-67a70975574a
|
||||||
|
- name: ACR_NAME
|
||||||
|
value: marcin00
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
dockerd &
|
||||||
|
docker build -t $DOCKER_IMAGE --build-arg APP_VERSION={{inputs.parameters.git-sha}} --build-arg BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") .
|
||||||
|
az login --identity --client-id ${CLIENT_ID}
|
||||||
|
az acr login --name ${ACR_NAME}
|
||||||
|
docker push ${DOCKER_IMAGE}
|
||||||
|
volumeMounts:
|
||||||
|
- name: workspace
|
||||||
|
mountPath: /workspace
|
||||||
|
- name: gitops-commit
|
||||||
|
inputs:
|
||||||
|
parameters:
|
||||||
|
- name: git-sha
|
||||||
|
container:
|
||||||
|
image: alpine/git
|
||||||
|
command: [sh, -c]
|
||||||
|
env:
|
||||||
|
- name: DEPLOY_REPO_URL
|
||||||
|
value: ssh://git@srv22.mikr.us:20343/pikram/user-microservice-deploy.git
|
||||||
|
- name: DEPLOY_REPO_BRANCH
|
||||||
|
value: argo-deploy
|
||||||
|
- name: CI_COMMIT_SHA
|
||||||
|
value: "{{inputs.parameters.git-sha}}"
|
||||||
|
args:
|
||||||
|
- |
|
||||||
|
mkdir -p ~/.ssh
|
||||||
|
cp /mnt/secrets/gitea-known-host ~/.ssh/known_hosts
|
||||||
|
chmod 644 ~/.ssh/known_hosts
|
||||||
|
cp /mnt/secrets/gitea-deploy-key ~/.ssh/id_ed25519
|
||||||
|
chmod 600 ~/.ssh/id_ed25519
|
||||||
|
git config --global user.name "argo[bot]"
|
||||||
|
git config --global user.email "argo@marcin00.pl"
|
||||||
|
git clone --depth 1 --branch $DEPLOY_REPO_BRANCH --single-branch $DEPLOY_REPO_URL repo
|
||||||
|
cd repo
|
||||||
|
awk -v commit="$CI_COMMIT_SHA" '
|
||||||
|
$0 ~ /name:[[:space:]]*api/ { in_api_container = 1; print; next }
|
||||||
|
in_api_container && $0 ~ /^[[:space:]]*image:[[:space:]]*/ {
|
||||||
|
sub(/:[^:[:space:]]+$/, ":" commit)
|
||||||
|
in_api_container = 0
|
||||||
|
print
|
||||||
|
next
|
||||||
|
}
|
||||||
|
{ print }
|
||||||
|
' deploy.yaml > deploy.tmp && mv deploy.tmp deploy.yaml
|
||||||
|
git add deploy.yaml
|
||||||
|
git diff-index --quiet HEAD || git commit -m "Argo: Changed deployed version to $CI_COMMIT_SHA"
|
||||||
|
git push origin $DEPLOY_REPO_BRANCH
|
||||||
|
volumeMounts:
|
||||||
|
- name: secrets-store
|
||||||
|
mountPath: "/mnt/secrets"
|
||||||
|
readOnly: true
|
15
argo-workflows/source.yaml
Normal file
15
argo-workflows/source.yaml
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
apiVersion: argoproj.io/v1alpha1
|
||||||
|
kind: EventSource
|
||||||
|
metadata:
|
||||||
|
name: webhook
|
||||||
|
namespace: argo-events
|
||||||
|
spec:
|
||||||
|
service:
|
||||||
|
ports:
|
||||||
|
- port: 12000
|
||||||
|
targetPort: 12000
|
||||||
|
webhook:
|
||||||
|
test-hook:
|
||||||
|
endpoint: /gitea-hook
|
||||||
|
method: POST
|
||||||
|
port: "12000"
|
20
argo-workflows/webhook-ingress.yaml
Normal file
20
argo-workflows/webhook-ingress.yaml
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
apiVersion: networking.k8s.io/v1
|
||||||
|
kind: Ingress
|
||||||
|
metadata:
|
||||||
|
name: argo-ingress
|
||||||
|
namespace: argo-events
|
||||||
|
annotations:
|
||||||
|
nginx.ingress.kubernetes.io/ssl-redirect: "false"
|
||||||
|
spec:
|
||||||
|
ingressClassName: nginx
|
||||||
|
rules:
|
||||||
|
- host: argo-hook.marcin00.pl
|
||||||
|
http:
|
||||||
|
paths:
|
||||||
|
- path: /gitea-hook
|
||||||
|
pathType: Prefix
|
||||||
|
backend:
|
||||||
|
service:
|
||||||
|
name: webhook-eventsource-svc
|
||||||
|
port:
|
||||||
|
number: 12000
|
16
argo-workflows/webhook-service.yaml
Normal file
16
argo-workflows/webhook-service.yaml
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: webhook-eventsource-svc
|
||||||
|
namespace: argo-events
|
||||||
|
spec:
|
||||||
|
type: ClusterIP
|
||||||
|
ports:
|
||||||
|
- name: default
|
||||||
|
port: 12000
|
||||||
|
protocol: TCP
|
||||||
|
targetPort: 12000
|
||||||
|
selector:
|
||||||
|
controller: eventsource-controller
|
||||||
|
eventsource-name: webhook
|
||||||
|
owner-name: webhook
|
@ -1,67 +0,0 @@
|
|||||||
import matplotlib.pyplot as plt  # type: ignore
import os
import statistics

# Measured deployment times in seconds, 20 samples per CI/CD tool combination.
data = {
    'Jenkins + Jenkins': (165, 158, 217, 164, 136, 135, 147, 145, 138, 134, 137, 129, 136, 142, 125, 138, 133, 136, 128, 131),
    'Jenkins + ArgoCD': (181, 111, 115, 121, 128, 105, 108, 119, 112, 109, 110, 108, 111, 106, 113, 117, 113, 120, 113, 107),
    'Jenkins + FluxCD' : (167, 119, 113, 110, 102, 126, 111, 113, 118, 106, 111, 104, 101, 105, 104, 106, 102, 105, 107, 103),
    'Woodpecker + Woodpecker': (340, 348, 334, 363, 350, 339, 331, 354, 357, 351, 356, 347, 354, 341, 357, 352, 368, 336, 331, 340),
    'Woodpecker + ArgoCD': (355, 360, 354, 344, 318, 353, 328, 305, 331, 324, 328, 349, 337, 328, 349, 350, 344, 344, 344, 341),
    'Woodpecker + FluxCD' : (326, 344, 325, 337, 343, 358, 339, 341, 335, 354, 342, 355, 345, 334, 356, 346, 338, 342, 330, 333),
    'Argo Workflows + Argo-Workflows': (190, 190, 169, 211, 172, 198, 207, 192, 212, 181, 168, 199, 216, 213, 220, 209, 192, 210, 196, 165),
    'Argo Workflows + ArgoCD': (145, 159, 163, 148, 169, 185, 153, 148, 139, 176, 133, 140, 161, 135, 161, 130, 139, 164, 183, 183),
    'Argo Workflows + FluxCD': (161, 136, 181, 157, 141, 139, 157, 149, 151, 139, 139, 148, 152, 142, 136, 149, 160, 145, 173, 161)
}

# Per-combination arithmetic means, in the same order as the dict keys.
labels = list(data.keys())
means = [statistics.mean(data[k]) for k in labels]

# Index groups compared on a single chart:
# first three rows = same CI tool, last three = same CD tool.
groupings = [
    [0, 1, 2],
    [3, 4, 5],
    [6, 7, 8],
    [0, 3, 6],
    [1, 4, 7],
    [2, 5, 8]
]

# Colors from the 'tab10' palette.
color_palette = plt.get_cmap('tab10')

# Output folder for the generated charts.
output_folder = "plots"
os.makedirs(output_folder, exist_ok=True)

# Generate one bar chart per comparison group.
for i, group in enumerate(groupings):
    group_labels = [labels[j] for j in group]
    group_means = [means[j] for j in group]
    colors = [color_palette(j % 10) for j in group]  # stable color per combination

    plt.figure()
    bars = plt.bar(group_labels, group_means, color=colors)

    # Raise the y-axis limit by 15% so the value labels fit above the bars.
    max_val = max(group_means)
    plt.ylim(0, max_val * 1.15)

    plt.ylabel("Średni czas wdrożenia (sek)")
    # FIX: dropped the useless f-string prefix — the title has no placeholders.
    plt.title("Porównanie średnich czasów wdrożenia")
    plt.xticks(rotation=45)

    # Value labels above each bar (decimal comma, Polish number formatting).
    for bar in bars:
        yval = bar.get_height()
        label = f'{yval:.1f}'.replace('.', ',')
        plt.text(bar.get_x() + bar.get_width()/2.0, yval + max_val * 0.02, label,
                 ha='center', va='bottom', fontsize=9)

    plt.tight_layout()
    plt.savefig(f"{output_folder}/mean_times_{i}.png")
    plt.close()

print("Wszystkie wykresy wygenerowane z dodatkowymi marginesami!")
|
|
@ -1,54 +0,0 @@
|
|||||||
#!/bin/bash

# Measures end-to-end deployment time: pushes a marker commit, then polls
# the /version endpoint until the deployed version changes, and appends
# the timing to a CSV file.

# === CONFIGURATION ===
APP_URL="https://user-microservice.marcin00.pl/version"
MARKER_FILE="version_marker.txt"
OUTPUT_FILE="deployment_times.csv"
CHECK_INTERVAL=1 # seconds

# === FETCH THE CURRENTLY DEPLOYED VERSION ===
echo "[INFO] Pobieranie aktualnej wersji z /version..."
OLD_VERSION=$(curl -s "$APP_URL" | jq -r '.version')

# FIX: `jq -r` prints the literal string "null" when the key is missing,
# which passed the original `-z` check; treat it the same as no response.
if [[ -z "$OLD_VERSION" || "$OLD_VERSION" == "null" ]]; then
    echo "[ERROR] Nie udało się pobrać aktualnej wersji aplikacji."
    exit 1
fi

echo "[INFO] Aktualna wersja: $OLD_VERSION"

# === Touch the marker file, commit and push ===
TIMESTAMP=$(date +%s)
echo "$TIMESTAMP" > "$MARKER_FILE"

git add "$MARKER_FILE"
git commit -m "Automatyczna zmiana: $TIMESTAMP"
START_TIME=$(date +%s)

echo "[INFO] Wykonuję git push..."
# FIX: test the command directly instead of the fragile `$?` re-check,
# which breaks as soon as any statement lands between push and the check.
if ! git push; then
    echo "[ERROR] Push nie powiódł się."
    exit 1
fi

echo "[INFO] Oczekiwanie na wdrożenie nowej wersji..."

# === Poll the /version endpoint until the version changes ===
while true; do
    sleep "$CHECK_INTERVAL"
    NEW_VERSION=$(curl -s "$APP_URL" | jq -r '.version')

    # FIX: a transient curl failure yields an empty/"null" NEW_VERSION,
    # which the original counted as a completed deployment.
    if [[ -n "$NEW_VERSION" && "$NEW_VERSION" != "null" && "$NEW_VERSION" != "$OLD_VERSION" ]]; then
        END_TIME=$(date +%s)
        DURATION=$((END_TIME - START_TIME))
        echo "[INFO] Nowa wersja wdrożona: $NEW_VERSION"
        echo "[INFO] Czas wdrożenia: $DURATION sekund"

        echo "$START_TIME,$END_TIME,$DURATION,$OLD_VERSION,$NEW_VERSION" >> "$OUTPUT_FILE"
        break
    else
        echo "[WAIT] Czekam... ($NEW_VERSION)"
    fi
done
|
|
@ -7,24 +7,9 @@ services:
|
|||||||
build: .
|
build: .
|
||||||
env_file:
|
env_file:
|
||||||
- api/.env
|
- api/.env
|
||||||
ports:
|
|
||||||
- 80:80
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "curl", "-f", "http://localhost/health"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 5
|
|
||||||
start_period: 15s
|
|
||||||
db:
|
db:
|
||||||
container_name: db
|
container_name: db
|
||||||
hostname: db
|
hostname: db
|
||||||
image: mysql:latest
|
image: mysql:latest
|
||||||
env_file:
|
env_file:
|
||||||
- db/.env
|
- db/.env
|
||||||
ports:
|
|
||||||
- 3306:3306
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 5
|
|
||||||
|
Reference in New Issue
Block a user