15 Commits

15 changed files with 202 additions and 307 deletions

View File

@@ -1,5 +1,18 @@
-FROM python:3.11.7-slim-bookworm
+FROM python:3.11.7-alpine
+
+# Version and build date passed in as build args
+ARG APP_VERSION=unknown
+ARG BUILD_DATE=unknown
+
+# Set them as ENV so they are available inside the container
+ENV APP_VERSION=$APP_VERSION
+ENV BUILD_DATE=$BUILD_DATE
+
 WORKDIR /app
 COPY api .
+
+RUN apk add --no-cache curl
 RUN pip install -r requirements.txt
 CMD python3 app.py

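For reference, the two build args above are meant to be supplied at build time; the Argo workflow below passes the git SHA and a UTC timestamp the same way. A local build might look roughly like this (the image tag is chosen only for illustration):

    docker build \
      --build-arg APP_VERSION=$(git rev-parse HEAD) \
      --build-arg BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
      -t user-microservice:local .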
Jenkinsfile vendored
View File

@@ -1,72 +0,0 @@
pipeline {
    agent any
    environment {
        DOCKER_REGISTRY_URL = 'marcin00.azurecr.io'
        DOCKER_IMAGE = "${DOCKER_REGISTRY_URL}/user-microservice:${GIT_COMMIT}"
        ACR_NAME = 'marcin00'
    }
    stages {
        stage('Checkout') {
            steps {
                checkout scm
            }
        }
        stage('Test python app') {
            steps {
                script {
                    dir('api') {
                        sh '''
                            python3 -m venv env
                            source env/bin/activate
                            pip install -r requirements.txt pytest
                            python3 -m pytest --junit-xml=pytest_junit.xml
                        '''
                    }
                }
            }
            post {
                always {
                    junit testResults: '**/*pytest_junit.xml'
                }
            }
        }
        stage('Build & test docker image') {
            steps {
                script {
                    appImage = docker.build("${DOCKER_IMAGE}")
                    sh label: 'Install dgoss', script: '''
                        curl -s -L https://github.com/aelsabbahy/goss/releases/latest/download/goss-linux-amd64 -o goss
                        curl -s -L https://github.com/aelsabbahy/goss/releases/latest/download/dgoss -o dgoss
                        chmod +rx *goss
                    '''
                    withEnv(['GOSS_OPTS=-f junit', 'GOSS_PATH=./goss', 'GOSS_SLEEP=3', 'SQLALCHEMY_DATABASE_URI=sqlite:///:memory:']) {
                        sh label: 'run image tests', script: './dgoss run -e SQLALCHEMY_DATABASE_URI=sqlite:///:memory: ${DOCKER_IMAGE} > goss_junit.xml'
                    }
                }
            }
            post {
                always {
                    junit testResults: '**/*goss_junit.xml'
                }
            }
        }
        stage('Deploy') {
            steps {
                script {
                    sh '''
                        az login --identity
                        az acr login --name ${ACR_NAME}
                        docker push ${DOCKER_IMAGE}
                    '''
                }
            }
        }
    }
    post {
        cleanup {
            script { cleanWs() }
        }
    }
}

View File

@@ -4,7 +4,8 @@ from flask_jwt_extended import JWTManager
 from jwt import ExpiredSignatureError
 from models import db, RevokedToken
 import os
-from utils import init_db
+from tech_views import tech_bp
+from utils import init_db, wait_for_db
 from views import user_bp
 from werkzeug.exceptions import HTTPException
@@ -26,6 +27,7 @@ def create_app(config_name="default"):
     # Blueprints registration
     app.register_blueprint(user_bp)
+    app.register_blueprint(tech_bp)

     # Database and JWT initialization
     db.init_app(app)
@@ -53,6 +55,7 @@ def create_app(config_name="default"):
     # Fill database by initial values (only if we are not testing)
     with app.app_context():
+        wait_for_db(max_retries=100)
         db.create_all()
         if config_name != "testing":
             init_db()

api/tech_views.py Normal file
View File

@@ -0,0 +1,20 @@
from flask import Blueprint, jsonify
from models import db
from sqlalchemy import text
import utils  # read db_ready through the module so later updates in utils are visible here

# Blueprint with technical endpoints
tech_bp = Blueprint('tech_bp', __name__)


@tech_bp.route('/health', methods=['GET'])
def health_check():
    "Check that the service is up and the database is reachable."
    try:
        with db.engine.connect() as connection:
            connection.execute(text("SELECT 1"))
        return jsonify(status="healthy"), 200
    except Exception:
        if utils.db_ready:
            return jsonify(status="unhealthy"), 500
        else:
            return jsonify(status="starting"), 503

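A quick manual check of the new endpoint, assuming the API container is reachable on port 80 as in the compose file further down (responses inferred from the handler above):

    curl -i http://localhost/health
    # 200 {"status": "healthy"}    - SELECT 1 succeeded
    # 503 {"status": "starting"}   - database not reachable yet (no successful connection so far)
    # 500 {"status": "unhealthy"}  - database was reachable earlier but the check now fails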
View File

@@ -2,17 +2,22 @@ from flask import abort
 from flask_jwt_extended import get_jwt_identity
 from models import User, db
 import os
+from sqlalchemy import text
+from sqlalchemy.exc import DatabaseError, InterfaceError
+import time
 from werkzeug.security import generate_password_hash

+db_ready = False


 def admin_required(user_id, message='Access denied.'):
+    "Check if a regular user is trying to perform an administrative action."
     user = db.session.get(User, user_id)
     if user is None or user.role != "Administrator":
         abort(403, message)


 def validate_access(owner_id, message='Access denied.'):
-    # Check if user try to access or edit resource that does not belong to them
+    "Check if a user is trying to access or edit a resource that does not belong to them."
     logged_user_id = int(get_jwt_identity())
     logged_user_role = db.session.get(User, logged_user_id).role
     if logged_user_role != "Administrator" and logged_user_id != owner_id:
@@ -27,6 +32,20 @@ def get_user_or_404(user_id):
     return user

+
+def wait_for_db(max_retries):
+    "Try to connect to the database, retrying up to <max_retries> times."
+    global db_ready
+    for _ in range(max_retries):
+        try:
+            with db.engine.connect() as connection:
+                connection.execute(text("SELECT 1"))
+            db_ready = True
+            return
+        except (DatabaseError, InterfaceError):
+            time.sleep(3)
+    raise Exception("Failed to connect to database.")

+
 def init_db():
     """Create default admin account if database is empty"""
     with db.session.begin():

View File

@@ -2,6 +2,7 @@ from flask import Blueprint, jsonify, request, abort
 from flask_jwt_extended import create_access_token, set_access_cookies, jwt_required, \
     verify_jwt_in_request, get_jwt_identity, unset_jwt_cookies, get_jwt
 from models import db, RevokedToken, User
+import os
 from utils import admin_required, validate_access, get_user_or_404
 from werkzeug.security import check_password_hash, generate_password_hash
@@ -110,3 +111,10 @@ def user_logout():
     response = jsonify({"msg": "User logged out successfully."})
     unset_jwt_cookies(response)
     return response
+
+
+@user_bp.route('/version', methods=['GET'])
+def version():
+    return jsonify({
+        "version": os.getenv("APP_VERSION", "unknown"),
+        "build_time": os.getenv("BUILD_DATE", "unknown")
+    })

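The deployment timer script further down polls this endpoint; a manual call against the public URL used there would look like this (the returned values are illustrative):

    curl -s https://user-microservice.marcin00.pl/version | jq .
    # {
    #   "version": "<git commit SHA baked in as APP_VERSION>",
    #   "build_time": "<UTC timestamp baked in as BUILD_DATE>"
    # }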
View File

@@ -1,5 +0,0 @@
apiVersion: v1
kind: ServiceAccount
metadata:
  name: argo-workflows-user
  namespace: argo

View File

@@ -1,12 +0,0 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  namespace: argo
  name: argo-workflow-manager
rules:
  - apiGroups: ["argoproj.io"]
    resources: ["workflowtaskresults"]
    verbs: ["create", "get", "list", "update", "patch", "delete"]
  - apiGroups: ["argoproj.io"]
    resources: ["workflows"]
    verbs: ["create", "get", "list", "update", "patch", "delete"]

View File

@@ -1,166 +0,0 @@
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: build-workflow-
spec:
  entrypoint: main
  serviceAccountName: argo-workflows-user
  volumeClaimTemplates:
    - metadata:
        name: workspace
      spec:
        accessModes: [ "ReadWriteOnce" ]
        resources:
          requests:
            storage: 128Mi
  volumes:
    - name: secrets-store
      csi:
        driver: secrets-store.csi.k8s.io
        readOnly: true
        volumeAttributes:
          secretProviderClass: azure-keyvault
  templates:
    # Main steps sequence
    - name: main
      steps:
        - - name: checkout
            template: checkout
        - - name: tests
            template: tests
        - - name: build-and-push-image
            template: build-and-push-image
            arguments:
              parameters:
                - name: git-sha
                  value: "{{steps.checkout.outputs.parameters.git-sha}}"
        - - name: gitops-commit
            template: gitops-commit
            arguments:
              parameters:
                - name: git-sha
                  value: "{{steps.checkout.outputs.parameters.git-sha}}"
    # GIT CHECKOUT
    - name: checkout
      container:
        image: alpine/git
        command: [sh, -c]
        workingDir: /workspace
        env:
          - name: REPO_URL
            value: https://gitea.marcin00.pl/pikram/user-microservice.git
          - name: REPO_BRANCH
            value: dev
        args:
          - |
            git clone --depth 1 --branch "${REPO_BRANCH}" --single-branch "${REPO_URL}" repo
            cd repo
            git rev-parse HEAD > /tmp/gitsha.txt
        volumeMounts:
          - name: workspace
            mountPath: /workspace
      outputs:
        parameters:
          - name: git-sha
            valueFrom:
              path: /tmp/gitsha.txt
    # PYTHON TESTS
    - name: tests
      script:
        image: python:3.11.7-alpine
        command: [sh]
        workingDir: /workspace/repo/api
        source: |
          python3 -m venv env
          source env/bin/activate
          pip install -r requirements.txt pytest
          python3 -m pytest --junit-xml=pytest_junit.xml
        volumeMounts:
          - name: workspace
            mountPath: /workspace
    # BUILD AND PUSH DOCKER IMAGE
    - name: build-and-push-image
      inputs:
        parameters:
          - name: git-sha
      podSpecPatch: |
        runtimeClassName: sysbox-runc
      metadata:
        annotations:
          io.kubernetes.cri-o.userns-mode: "auto:size=65536"
      container:
        image: marcin00.azurecr.io/azure-cli-docker:slim-bookworm
        command: [sh, -c]
        workingDir: /workspace/repo
        env:
          - name: DOCKER_IMAGE
            value: marcin00.azurecr.io/user-microservice:{{inputs.parameters.git-sha}}
          - name: CLIENT_ID
            value: c302726f-fafb-4143-94c1-67a70975574a  # client ID of the managed identity used by the cluster
          - name: ACR_NAME
            value: marcin00
        args:
          - |
            dockerd &
            docker build -t $DOCKER_IMAGE --build-arg APP_VERSION={{inputs.parameters.git-sha}} --build-arg BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") .
            az login --identity --client-id ${CLIENT_ID}
            az acr login --name ${ACR_NAME}
            docker push ${DOCKER_IMAGE}
        volumeMounts:
          - name: workspace
            mountPath: /workspace
    # GITOPS COMMIT
    - name: gitops-commit
      inputs:
        parameters:
          - name: git-sha
      container:
        image: alpine/git
        command: [sh, -c]
        env:
          - name: DEPLOY_REPO_URL
            value: ssh://git@srv22.mikr.us:20343/pikram/user-microservice-deploy.git
          - name: DEPLOY_REPO_BRANCH
            value: argo-deploy
          - name: CI_COMMIT_SHA
            value: "{{inputs.parameters.git-sha}}"
        args:
          - |
            mkdir -p ~/.ssh
            cp /mnt/secrets/gitea-known-host ~/.ssh/known_hosts
            chmod 644 ~/.ssh/known_hosts
            cp /mnt/secrets/gitea-deploy-key ~/.ssh/id_rsa
            chmod 600 ~/.ssh/id_rsa
            git config --global user.name "argo[bot]"
            git config --global user.email "argo@marcin00.pl"
            git clone --depth 1 --branch $DEPLOY_REPO_BRANCH --single-branch $DEPLOY_REPO_URL repo
            cd repo
            awk -v commit="$CI_COMMIT_SHA" '
              $0 ~ /name:[[:space:]]*api/ { in_api_container = 1; print; next }
              in_api_container && $0 ~ /^[[:space:]]*image:[[:space:]]*/ {
                sub(/:[^:[:space:]]+$/, ":" commit)
                in_api_container = 0
                print
                next
              }
              { print }
            ' deploy.yaml > deploy.tmp && mv deploy.tmp deploy.yaml
            git add deploy.yaml
            git diff-index --quiet HEAD || git commit -m "Argo: Changed deployed version to $CI_COMMIT_SHA"
            git push origin $DEPLOY_REPO_BRANCH
        volumeMounts:
          - name: secrets-store
            mountPath: "/mnt/secrets"
            readOnly: true

View File

@@ -1,13 +0,0 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: argo-workflows-role-binding
  namespace: argo
subjects:
  - kind: ServiceAccount
    name: argo-workflows-user
    namespace: argo
roleRef:
  kind: Role
  name: argo-workflow-manager
  apiGroup: rbac.authorization.k8s.io

View File

@@ -1,28 +0,0 @@
apiVersion: secrets-store.csi.x-k8s.io/v1
kind: SecretProviderClass
metadata:
  name: azure-keyvault
  namespace: argo
spec:
  provider: azure
  secretObjects:
    - secretName: acr-creds
      type: Opaque
      data:
        - objectName: acr-password
          key: password
  parameters:
    usePodIdentity: "false"
    useVMManagedIdentity: "true"
    userAssignedIdentityID: "f91aef65-7d2a-4df8-a884-e33b05d54a31"  # client_id of the user-assigned managed identity
    clientID: "f91aef65-7d2a-4df8-a884-e33b05d54a31"                # client_id of the user-assigned managed identity
    keyvaultName: "dev-aks"
    objects: |
      array:
        - |
          objectName: gitea-known-host
          objectType: secret
        - |
          objectName: gitea-deploy-key
          objectType: secret
    tenantID: "f4e3e6f7-d21c-460e-b201-2192174e7f41"

data_analysis.py Normal file
View File

@@ -0,0 +1,67 @@
import matplotlib.pyplot as plt  # type: ignore
import os
import statistics

# Data
data = {
    'Jenkins + Jenkins': (165, 158, 217, 164, 136, 135, 147, 145, 138, 134, 137, 129, 136, 142, 125, 138, 133, 136, 128, 131),
    'Jenkins + ArgoCD': (181, 111, 115, 121, 128, 105, 108, 119, 112, 109, 110, 108, 111, 106, 113, 117, 113, 120, 113, 107),
    'Jenkins + FluxCD': (167, 119, 113, 110, 102, 126, 111, 113, 118, 106, 111, 104, 101, 105, 104, 106, 102, 105, 107, 103),
    'Woodpecker + Woodpecker': (340, 348, 334, 363, 350, 339, 331, 354, 357, 351, 356, 347, 354, 341, 357, 352, 368, 336, 331, 340),
    'Woodpecker + ArgoCD': (355, 360, 354, 344, 318, 353, 328, 305, 331, 324, 328, 349, 337, 328, 349, 350, 344, 344, 344, 341),
    'Woodpecker + FluxCD': (326, 344, 325, 337, 343, 358, 339, 341, 335, 354, 342, 355, 345, 334, 356, 346, 338, 342, 330, 333),
    'Argo Workflows + Argo-Workflows': (190, 190, 169, 211, 172, 198, 207, 192, 212, 181, 168, 199, 216, 213, 220, 209, 192, 210, 196, 165),
    'Argo Workflows + ArgoCD': (145, 159, 163, 148, 169, 185, 153, 148, 139, 176, 133, 140, 161, 135, 161, 130, 139, 164, 183, 183),
    'Argo Workflows + FluxCD': (161, 136, 181, 157, 141, 139, 157, 149, 151, 139, 139, 148, 152, 142, 136, 149, 160, 145, 173, 161)
}

# Compute the means
labels = list(data.keys())
means = [statistics.mean(data[k]) for k in labels]

# Index groups to compare
groupings = [
    [0, 1, 2],
    [3, 4, 5],
    [6, 7, 8],
    [0, 3, 6],
    [1, 4, 7],
    [2, 5, 8]
]

# Colours from the 'tab10' palette
color_palette = plt.get_cmap('tab10')

# Output folder
output_folder = "plots"
os.makedirs(output_folder, exist_ok=True)

# Generate the plots
for i, group in enumerate(groupings):
    group_labels = [labels[j] for j in group]
    group_means = [means[j] for j in group]
    colors = [color_palette(j % 10) for j in group]  # distinct colours

    plt.figure()
    bars = plt.bar(group_labels, group_means, color=colors)

    # Find the maximum value so the Y axis can be extended
    max_val = max(group_means)
    plt.ylim(0, max_val * 1.15)  # add 15% headroom for the value labels

    plt.ylabel("Mean deployment time (s)")
    plt.title("Comparison of mean deployment times")
    plt.xticks(rotation=45)

    # Put the value above each bar
    for bar in bars:
        yval = bar.get_height()
        label = f'{yval:.1f}'.replace('.', ',')  # swap the decimal point for a comma
        plt.text(bar.get_x() + bar.get_width() / 2.0, yval + max_val * 0.02, label,
                 ha='center', va='bottom', fontsize=9)

    plt.tight_layout()
    plt.savefig(f"{output_folder}/mean_times_{i}.png")
    plt.close()

print("All plots generated with extra headroom!")

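The script only needs matplotlib; a sketch of how it might be run (the output folder name comes from the script itself):

    pip install matplotlib
    python3 data_analysis.py    # writes mean_times_0.png ... mean_times_5.png into ./plots/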
deployment_timer.sh Executable file
View File

@@ -0,0 +1,54 @@
#!/bin/bash

# === CONFIGURATION ===
APP_URL="https://user-microservice.marcin00.pl/version"
MARKER_FILE="version_marker.txt"
OUTPUT_FILE="deployment_times.csv"
CHECK_INTERVAL=1  # seconds

# === FETCH THE CURRENTLY DEPLOYED VERSION ===
echo "[INFO] Fetching the current version from /version..."
OLD_VERSION=$(curl -s "$APP_URL" | jq -r '.version')

if [[ -z "$OLD_VERSION" ]]; then
    echo "[ERROR] Failed to fetch the current application version."
    exit 1
fi

echo "[INFO] Current version: $OLD_VERSION"

# === Modify the marker file, commit and push ===
TIMESTAMP=$(date +%s)
echo "$TIMESTAMP" > "$MARKER_FILE"

git add "$MARKER_FILE"
git commit -m "Automated change: $TIMESTAMP"

START_TIME=$(date +%s)
echo "[INFO] Running git push..."
git push

if [[ $? -ne 0 ]]; then
    echo "[ERROR] Push failed."
    exit 1
fi

echo "[INFO] Waiting for the new version to be deployed..."

# === Poll the /version endpoint ===
while true; do
    sleep $CHECK_INTERVAL
    NEW_VERSION=$(curl -s "$APP_URL" | jq -r '.version')

    if [[ "$NEW_VERSION" != "$OLD_VERSION" ]]; then
        END_TIME=$(date +%s)
        DURATION=$((END_TIME - START_TIME))
        echo "[INFO] New version deployed: $NEW_VERSION"
        echo "[INFO] Deployment time: $DURATION seconds"
        echo "$START_TIME,$END_TIME,$DURATION,$OLD_VERSION,$NEW_VERSION" >> "$OUTPUT_FILE"
        break
    else
        echo "[WAIT] Waiting... ($NEW_VERSION)"
    fi
done

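Run it from a clone of the application repository with push access; curl and jq must be installed. One CSV row is appended per measured deployment (the timestamps below are made up for illustration):

    chmod +x deployment_timer.sh
    ./deployment_timer.sh
    # deployment_times.csv gains a line like:
    # 1718000000,1718000142,142,<old sha>,<new sha>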
View File

@@ -7,9 +7,24 @@ services:
     build: .
     env_file:
       - api/.env
+    ports:
+      - 80:80
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost/health"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+      start_period: 15s
   db:
     container_name: db
     hostname: db
     image: mysql:latest
     env_file:
       - db/.env
+    ports:
+      - 3306:3306
+    healthcheck:
+      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
+      interval: 10s
+      timeout: 5s
+      retries: 5

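With the healthchecks in place, a recent Compose v2 release can block startup until both containers report healthy and show the state afterwards (a usage sketch, not part of this change):

    docker compose up -d --wait   # waits for the healthchecks to pass
    docker compose ps             # STATUS column shows "(healthy)" once the checks succeed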
View File

@@ -1,8 +0,0 @@
port:
  tcp:80:
    listening: true
    ip:
      - 0.0.0.0
process:
  python3:
    running: true