diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000..fe8a581226
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,99 @@
+# https://github.com/gitattributes/gitattributes/blob/fddc586cf0f10ec4485028d0d2dd6f73197a4258/Common.gitattributes
+# Common settings that generally should always be used with your language specific settings
+
+# Auto detect text files and perform LF normalization
+* text=auto
+
+#
+# The above will handle all files NOT found below
+#
+
+# Documents
+*.bibtex text diff=bibtex
+*.doc diff=astextplain
+*.DOC diff=astextplain
+*.docx diff=astextplain
+*.DOCX diff=astextplain
+*.dot diff=astextplain
+*.DOT diff=astextplain
+*.pdf diff=astextplain
+*.PDF diff=astextplain
+*.rtf diff=astextplain
+*.RTF diff=astextplain
+*.md text diff=markdown
+*.mdx text diff=markdown
+*.tex text diff=tex
+*.adoc text
+*.textile text
+*.mustache text
+*.csv text eol=crlf
+*.tab text
+*.tsv text
+*.txt text
+*.sql text
+*.epub diff=astextplain
+
+# Graphics
+*.png binary
+*.jpg binary
+*.jpeg binary
+*.gif binary
+*.tif binary
+*.tiff binary
+*.ico binary
+# SVG treated as text by default.
+*.svg text
+# If you want to treat it as binary,
+# use the following line instead.
+# *.svg binary
+*.eps binary
+
+# Scripts
+*.bash text eol=lf
+*.fish text eol=lf
+*.ksh text eol=lf
+*.sh text eol=lf
+*.zsh text eol=lf
+# These are explicitly windows files and should use crlf
+*.bat text eol=crlf
+*.cmd text eol=crlf
+*.ps1 text eol=crlf
+
+# Serialisation
+*.json text
+*.toml text
+*.xml text
+*.yaml text
+*.yml text
+
+# Archives
+*.7z binary
+*.bz binary
+*.bz2 binary
+*.bzip2 binary
+*.gz binary
+*.lz binary
+*.lzma binary
+*.rar binary
+*.tar binary
+*.taz binary
+*.tbz binary
+*.tbz2 binary
+*.tgz binary
+*.tlz binary
+*.txz binary
+*.xz binary
+*.Z binary
+*.zip binary
+*.zst binary
+
+# Text files where line endings should be preserved
+*.patch -text
+
+#
+# Exclude files from exporting
+#
+
+.gitattributes export-ignore
+.gitignore export-ignore
+.gitkeep export-ignore
diff --git a/.github/actions/python-setup/action.yml b/.github/actions/python-setup/action.yml
new file mode 100644
index 0000000000..25c87fa7c7
--- /dev/null
+++ b/.github/actions/python-setup/action.yml
@@ -0,0 +1,57 @@
+name: Python Poetry Setup
+description: Set up Python + Poetry, cache dependencies, and install project deps
+
+inputs:
+ python-version:
+ description: Python version to install
+ required: false
+ default: "3.14"
+ poetry-version:
+ description: Poetry version to install
+ required: false
+ default: "2.3.2"
+ working-directory:
+ description: Project directory containing pyproject.toml
+ required: false
+ default: "app_python"
+ lockfile-path:
+ description: Path to poetry.lock for cache key invalidation
+ required: false
+ default: "app_python/poetry.lock"
+ install-args:
+ description: Extra arguments passed to poetry install
+ required: false
+ default: "--with dev --no-interaction --no-ansi"
+
+runs:
+ using: composite
+ steps:
+ - name: Setup Python
+      uses: actions/setup-python@v5
+ with:
+ python-version: ${{ inputs.python-version }}
+
+ - name: Install Poetry
+ uses: snok/install-poetry@v1
+ with:
+ version: ${{ inputs.poetry-version }}
+
+ - name: Configure Poetry virtualenv location
+ shell: bash
+ working-directory: ${{ inputs.working-directory }}
+ run: poetry config virtualenvs.in-project true
+
+ - name: Cache Poetry dependencies
+ uses: actions/cache@v4
+ with:
+ path: |
+ ~/.cache/pypoetry
+ ${{ inputs.working-directory }}/.venv
+ key: ${{ runner.os }}-py${{ inputs.python-version }}-poetry${{ inputs.poetry-version }}-${{ hashFiles(inputs.lockfile-path) }}
+ restore-keys: |
+ ${{ runner.os }}-py${{ inputs.python-version }}-poetry${{ inputs.poetry-version }}-
+
+ - name: Install dependencies
+ shell: bash
+ working-directory: ${{ inputs.working-directory }}
+ run: poetry install ${{ inputs.install-args }}
diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml
new file mode 100644
index 0000000000..712fe39c42
--- /dev/null
+++ b/.github/workflows/python-ci.yml
@@ -0,0 +1,56 @@
+name: Python CI
+
+on:
+ push:
+ paths:
+ - app_python/**
+ - .github/actions/python-setup/**
+ - .github/workflows/python-ci.yml
+ pull_request:
+ branches:
+ - master
+ paths:
+ - app_python/**
+ - .github/actions/python-setup/**
+ - .github/workflows/python-ci.yml
+
+jobs:
+ test:
+ strategy:
+ fail-fast: false
+ matrix:
+        python-version: ["3.14"]
+ poetry-version: [2.3.2]
+ os: [ubuntu-latest]
+ runs-on: ${{ matrix.os }}
+ defaults:
+ run:
+ working-directory: ./app_python
+ steps:
+      - uses: actions/checkout@v4
+ - name: Setup Python tooling and dependencies
+ uses: ./.github/actions/python-setup
+ with:
+ python-version: ${{ matrix.python-version }}
+ poetry-version: ${{ matrix.poetry-version }}
+ working-directory: app_python
+ lockfile-path: app_python/poetry.lock
+ install-args: --with dev --no-interaction --no-ansi
+ - name: Lint with flake8
+ run: poetry run flake8 src tests
+ - name: Test using pytest with coverage report
+ run: |
+ mkdir -p test-results
+ poetry run pytest \
+ --junitxml=test-results/pytest-report.xml \
+ --cov=src \
+ --cov-report=term-missing \
+ --cov-report=xml:test-results/coverage.xml
+ - name: Upload pytest and coverage reports
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: python-test-reports
+ path: |
+ app_python/test-results/pytest-report.xml
+ app_python/test-results/coverage.xml
diff --git a/.github/workflows/python-docker.yml b/.github/workflows/python-docker.yml
new file mode 100644
index 0000000000..dde175a645
--- /dev/null
+++ b/.github/workflows/python-docker.yml
@@ -0,0 +1,82 @@
+name: Python Docker Publish
+
+on:
+ push:
+ branches:
+ - "lab*"
+ paths:
+ - app_python/**
+ - .github/workflows/python-docker.yml
+ pull_request:
+ branches:
+ - master
+ types:
+ - closed
+ paths:
+ - app_python/**
+ - .github/workflows/python-docker.yml
+
+jobs:
+ build-and-push-branch:
+ if: github.event_name == 'push'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Derive lab+sha tag from branch
+ id: version
+ run: |
+          source_branch="$GITHUB_REF_NAME"
+ if [[ "$source_branch" =~ ([0-9]+) ]]; then
+ lab_number="${BASH_REMATCH[1]}"
+ lab_number=$((10#$lab_number))
+ short_sha="${GITHUB_SHA::7}"
+ echo "branch_tag=1.${lab_number}.${short_sha}" >> "$GITHUB_OUTPUT"
+ else
+ echo "Failed to extract lab number from branch: $source_branch" >&2
+ exit 1
+ fi
+ - name: Log in to Docker Hub
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Build and push Docker image (branch)
+ uses: docker/build-push-action@v6
+ with:
+ context: ./app_python
+ file: ./app_python/Dockerfile
+ push: true
+ tags: |
+ ${{ secrets.DOCKERHUB_USERNAME }}/devops-app-py:${{ steps.version.outputs.branch_tag }}
+
+ build-and-push:
+ if: github.event.pull_request.merged == true
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Derive lab version tag from merged branch
+ id: version
+ run: |
+ source_branch="${{ github.event.pull_request.head.ref }}"
+ if [[ "$source_branch" =~ ([0-9]+) ]]; then
+ lab_number="${BASH_REMATCH[1]}"
+ lab_number=$((10#$lab_number))
+ echo "version_tag=1.${lab_number}" >> "$GITHUB_OUTPUT"
+ else
+ echo "Failed to extract lab number from merged branch: $source_branch" >&2
+ exit 1
+ fi
+ - name: Log in to Docker Hub
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v6
+ with:
+ context: ./app_python
+ file: ./app_python/Dockerfile
+ push: true
+ tags: |
+ ${{ secrets.DOCKERHUB_USERNAME }}/devops-app-py:${{ steps.version.outputs.version_tag }}
+ ${{ secrets.DOCKERHUB_USERNAME }}/devops-app-py:latest
diff --git a/.github/workflows/python-snyk.yml b/.github/workflows/python-snyk.yml
new file mode 100644
index 0000000000..c3297eccc1
--- /dev/null
+++ b/.github/workflows/python-snyk.yml
@@ -0,0 +1,37 @@
+name: Python Snyk Scan
+
+on:
+ push:
+ paths:
+ - app_python/**
+ - .github/actions/python-setup/**
+ - .github/workflows/python-snyk.yml
+ pull_request:
+ branches:
+ - master
+ paths:
+ - app_python/**
+ - .github/actions/python-setup/**
+ - .github/workflows/python-snyk.yml
+
+jobs:
+ snyk:
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ working-directory: ./app_python
+ steps:
+ - uses: actions/checkout@v4
+ - name: Setup Python tooling and dependencies
+ uses: ./.github/actions/python-setup
+ - name: Setup Snyk CLI
+ uses: snyk/actions/setup@master
+ - name: Run Snyk dependency scan (or skip)
+ env:
+ SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
+ run: |
+ if [ -z "${SNYK_TOKEN:-}" ]; then
+ echo "SNYK_TOKEN secret not set; skipping Snyk dependency scan."
+ exit 0
+ fi
+ snyk test --severity-threshold=high
diff --git a/.gitignore b/.gitignore
index 30d74d2584..67e6b52b28 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,31 @@
-test
\ No newline at end of file
+test
+
+# Terraform
+**/.terraform/*
+*.tfstate
+*.tfstate.*
+*.tfplan
+tfplan
+*.tfvars
+*.tfvars.json
+crash.log
+override.tf
+override.tf.json
+*_override.tf
+*_override.tf.json
+.terraform.lock.hcl
+
+# Pulumi
+.pulumi/
+pulumi/.venv/
+pulumi/venv/
+pulumi/Pulumi.*.yaml
+
+# Python caches
+__pycache__/
+*.py[cod]
+.pytest_cache/
+.mypy_cache/
+
+# IDE
+.vscode/
diff --git a/app_go/.dockerignore b/app_go/.dockerignore
new file mode 100644
index 0000000000..6f1931b7cd
--- /dev/null
+++ b/app_go/.dockerignore
@@ -0,0 +1,4 @@
+*
+!go.mod
+!go.sum
+!*.go
diff --git a/app_go/.gitignore b/app_go/.gitignore
new file mode 100644
index 0000000000..add20bc5eb
--- /dev/null
+++ b/app_go/.gitignore
@@ -0,0 +1,33 @@
+# https://github.com/github/gitignore/blob/53fee13f20a05efc93ef4edcad0c62863520e268/Go.gitignore
+# If you prefer the allow list template instead of the deny list, see community template:
+# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
+#
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Code coverage profiles and other test artifacts
+*.out
+coverage.*
+*.coverprofile
+profile.cov
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+# Go workspace file
+go.work
+go.work.sum
+
+# env file
+.env
+
+# Editor/IDE
+.idea/
+.vscode/
diff --git a/app_go/Dockerfile b/app_go/Dockerfile
new file mode 100644
index 0000000000..aa269d06e2
--- /dev/null
+++ b/app_go/Dockerfile
@@ -0,0 +1,13 @@
+FROM golang:1.25-alpine AS build
+WORKDIR /app
+# Uncomment for dependency installation
+# COPY go.mod go.sum ./
+# RUN go mod download
+COPY go.mod *.go ./
+RUN CGO_ENABLED=0 GOOS=linux go build -o devops-info-service.out
+
+FROM scratch
+COPY --from=build /app/devops-info-service.out /
+# Use UID:GID to avoid copying user info (/etc/passwd)
+USER 10001:10001
+CMD ["/devops-info-service.out"]
diff --git a/app_go/README.md b/app_go/README.md
new file mode 100644
index 0000000000..22a27fa787
--- /dev/null
+++ b/app_go/README.md
@@ -0,0 +1,30 @@
+# DevOps Info Service (Go)
+
+## Overview
+Simple Go web service that exposes system/runtime details and a health check.
+
+## Prerequisites
+- Go 1.25+
+
+## Build
+```bash
+go build -o devops-info-service.out .
+```
+
+## Run
+```bash
+./devops-info-service.out
+# Or with custom config
+HOST=127.0.0.1 PORT=8080 ./devops-info-service.out
+```
+
+## Endpoints
+- `GET /` - service + system + runtime + request info
+- `GET /health` - health check
+
+## Configuration
+
+| Variable | Default | Description |
+| --- | --- | --- |
+| `HOST` | `0.0.0.0` | Bind address for the server |
+| `PORT` | `5000` | Port to listen on |
diff --git a/app_go/build.sh b/app_go/build.sh
new file mode 100644
index 0000000000..2832311420
--- /dev/null
+++ b/app_go/build.sh
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+go build -o devops-info-service.out .
diff --git a/app_go/docs/GO.md b/app_go/docs/GO.md
new file mode 100644
index 0000000000..3f4073e0d8
--- /dev/null
+++ b/app_go/docs/GO.md
@@ -0,0 +1,10 @@
+# Go Language Justification
+
+## Why Go
+
+I chose Go because it produces small, static binaries, compiles quickly, and has a minimal standard library that already covers HTTP servers. That makes it a good fit for a tiny service and for multi-stage Docker builds later in the course.
+
+## Tradeoffs
+
+- **Pros:** fast compile/run, simple deployment, good concurrency model, no runtime dependency chain.
+- **Cons:** less dynamic than Python for quick iteration, and JSON struct definitions add some boilerplate.
diff --git a/app_go/docs/LAB01.md b/app_go/docs/LAB01.md
new file mode 100644
index 0000000000..d80e7b00b9
--- /dev/null
+++ b/app_go/docs/LAB01.md
@@ -0,0 +1,42 @@
+# LAB01 — DevOps Info Service (Go)
+
+## Implementation Overview
+
+This Go version mirrors the Python service and exposes the same two endpoints using the standard `net/http` package.
+
+### Endpoints
+
+- `GET /` returns service, system, runtime, request info, and a list of endpoints.
+- `GET /health` returns a health status, timestamp, and uptime in seconds.
+
+### Runtime Behavior
+
+- **Uptime:** computed from a `startTime` set at process start.
+- **System info:** hostname via `os.Hostname`, OS/arch via `runtime`.
+- **Request info:** client IP from `X-Forwarded-For` or `RemoteAddr`.
+- **Errors:** JSON 404 for unknown paths and JSON 500 on panics (recovery middleware).
+
+## Build & Run
+
+```bash
+go build -o devops-info-service.out .
+./devops-info-service.out
+# Custom config
+HOST=127.0.0.1 PORT=8080 ./devops-info-service.out
+```
+
+## API Examples
+
+```bash
+curl -sS http://127.0.0.1:5000/ | jq
+curl -sS http://127.0.0.1:5000/health | jq
+```
+
+## Notes
+
+- `python_version` in the JSON is populated with the Go runtime version to keep the output shape identical to the Python app.
+- The advertised endpoint list is a static slice to match the Python output.
+
+## Screenshot
+
+![DevOps Info Service (Go) running locally](img/lab01go.png)
\ No newline at end of file
diff --git a/app_go/docs/LAB02.md b/app_go/docs/LAB02.md
new file mode 100644
index 0000000000..d906ab57fb
--- /dev/null
+++ b/app_go/docs/LAB02.md
@@ -0,0 +1,300 @@
+# LAB02 — Multi-Stage Docker Build (Go)
+
+## Multi-Stage Build Strategy
+
+The Go service is built with a two-stage Dockerfile:
+
+1. **Build stage (`golang:1.25-alpine`)**
+- Compiles the application binary with `CGO_ENABLED=0 GOOS=linux`.
+- Keeps compiler/toolchain in the build environment only.
+
+2. **Runtime stage (`scratch`)**
+- Copies only `devops-info-service.out` from the build stage.
+- Runs as non-root with `USER 10001:10001`.
+- Contains no package manager, shell, or compiler.
+
+Dockerfile used: `app_go/Dockerfile`
+
+```dockerfile
+FROM golang:1.25-alpine AS build
+WORKDIR /app
+COPY go.mod *.go ./
+RUN CGO_ENABLED=0 GOOS=linux go build -o devops-info-service.out
+
+FROM scratch
+COPY --from=build /app/devops-info-service.out /
+USER 10001:10001
+CMD ["/devops-info-service.out"]
+```
+
+Also, `.dockerignore` keeps context minimal:
+
+```dockerignore
+*
+!go.mod
+!go.sum
+!*.go
+```
+
+## Technical Explanation of Each Stage
+
+- **`FROM golang:1.25-alpine AS build`**
+ Provides Go toolchain and Alpine userspace needed to compile.
+- **`WORKDIR /app` + `COPY go.mod *.go ./`**
+ Copies only build inputs (module file and source files).
+- **`RUN CGO_ENABLED=0 GOOS=linux go build ...`**
+ Produces a Linux static binary suitable for `scratch` runtime.
+- **`FROM scratch`**
+ Starts an empty runtime image.
+- **`COPY --from=build ...`**
+ Transfers only the compiled artifact, not compilers or source.
+- **`USER 10001:10001`**
+ Drops root privileges in runtime.
+
+## Build Process (Terminal Output)
+
+
+🔨 Build target
+
+```log
+$ docker build --no-cache --progress=plain --target build -t lab02-go:builder .
+#0 building with "default" instance using docker driver
+
+#1 [internal] load build definition from Dockerfile
+#1 transferring dockerfile: 424B 0.0s done
+#1 DONE 0.1s
+
+#2 [internal] load metadata for docker.io/library/golang:1.25-alpine
+#2 DONE 1.0s
+
+#3 [internal] load .dockerignore
+#3 transferring context: 64B 0.0s done
+#3 DONE 0.0s
+
+#4 [internal] load build context
+#4 DONE 0.0s
+
+#5 [build 1/4] FROM docker.io/library/golang:1.25-alpine@sha256:f6751d823c26342f9506c03797d2527668d095b0a15f1862cddb4d927a7a4ced
+#5 resolve docker.io/library/golang:1.25-alpine@sha256:f6751d823c26342f9506c03797d2527668d095b0a15f1862cddb4d927a7a4ced 0.0s done
+#5 DONE 0.1s
+
+#6 [build 2/4] WORKDIR /app
+#6 CACHED
+
+#4 [internal] load build context
+#4 transferring context: 54B done
+#4 DONE 0.0s
+
+#7 [build 3/4] COPY go.mod *.go ./
+#7 DONE 0.1s
+
+#8 [build 4/4] RUN CGO_ENABLED=0 GOOS=linux go build -o devops-info-service.out
+#8 DONE 62.8s
+
+#9 exporting to image
+#9 exporting layers
+#9 exporting layers 7.4s done
+#9 exporting manifest sha256:f3e73461dd53d9f346f612d14a5d7db25b865b7aab912ba8d3cb89a098da0546
+#9 exporting manifest sha256:f3e73461dd53d9f346f612d14a5d7db25b865b7aab912ba8d3cb89a098da0546 0.0s done
+#9 exporting config sha256:06ba3662b02750d25c0817c4d26a4d0f77805f722bb6d60fa2b8c04b4308e480 0.0s done
+#9 exporting attestation manifest sha256:844a56a9b83102a634becbc82128fa16fd1c41bba4fd9f5c56cf7ed84ec0b2ad 0.0s done
+#9 exporting manifest list sha256:f2f7690814f0d4b01954394858a41285da9b7a2a425a2525c36f4f7dfe1577aa done
+#9 naming to docker.io/library/lab02-go:builder 0.0s done
+#9 unpacking to docker.io/library/lab02-go:builder
+#9 unpacking to docker.io/library/lab02-go:builder 1.7s done
+#9 DONE 9.4
+```
+
+
+
+
+🔨 Final multi-stage target
+
+```log
+$ docker build --no-cache --progress=plain -t lab02-go:final .
+#0 building with "default" instance using docker driver
+
+#1 [internal] load build definition from Dockerfile
+#1 transferring dockerfile: 424B 0.0s done
+#1 DONE 0.0s
+
+#2 [internal] load metadata for docker.io/library/golang:1.25-alpine
+#2 DONE 0.9s
+
+#3 [internal] load .dockerignore
+#3 transferring context: 64B done
+#3 DONE 0.0s
+
+#4 [internal] load build context
+#4 DONE 0.0s
+
+#5 [build 1/4] FROM docker.io/library/golang:1.25-alpine@sha256:f6751d823c26342f9506c03797d2527668d095b0a15f1862cddb4d927a7a4ced
+#5 resolve docker.io/library/golang:1.25-alpine@sha256:f6751d823c26342f9506c03797d2527668d095b0a15f1862cddb4d927a7a4ced 0.0s done
+#5 DONE 0.0s
+
+#6 [build 2/4] WORKDIR /app
+#6 CACHED
+
+#4 [internal] load build context
+#4 transferring context: 54B done
+#4 DONE 0.0s
+
+#7 [build 3/4] COPY go.mod *.go ./
+#7 DONE 0.1s
+
+#8 [build 4/4] RUN CGO_ENABLED=0 GOOS=linux go build -o devops-info-service.out
+#8 DONE 67.4s
+
+#9 [stage-1 1/1] COPY --from=build /app/devops-info-service.out /
+#9 DONE 0.1s
+
+#10 exporting to image
+#10 exporting layers
+#10 exporting layers 1.0s done
+#10 exporting manifest sha256:b3ddddd75de1b8fe87ecf287b479ae5804ae9b73e3c8c88b58553ae1e949d209
+#10 exporting manifest sha256:b3ddddd75de1b8fe87ecf287b479ae5804ae9b73e3c8c88b58553ae1e949d209 0.0s done
+#10 exporting config sha256:65c1bd7c8937841b2bb1e5d455bd1ec37dab85a4e0ac4eab15bf50d1fb61d19a done
+#10 exporting attestation manifest sha256:2c7f952e05e64da351b651ceb30a12d35c0304ef9fb21d7dd5089b365862464e 0.0s done
+#10 exporting manifest list sha256:2d3f56459e956a745bfe802d54a7f652677a6a993406ec23d7d0334f9ec99af5 0.0s done
+#10 naming to docker.io/library/lab02-go:final done
+#10 unpacking to docker.io/library/lab02-go:final
+#10 unpacking to docker.io/library/lab02-go:final 0.2s done
+#10 DONE 1.4s
+```
+
+
+
+## Working Containerized Application (Terminal Output)
+
+
+Server
+
+```bash
+$ docker run --rm -p 5000:5000 lab02-go:final
+2026/02/10 20:02:11 Application starting on 0.0.0.0:5000
+2026/02/10 20:02:27 Request: GET /
+2026/02/10 20:03:55 Request: GET /health
+```
+
+
+
+
+Client
+
+```json
+$ curl -sS 127.0.0.1:5000 | jq
+{
+ "service": {
+ "name": "devops-info-service",
+ "version": "1.0.0",
+ "description": "DevOps course info service",
+ "framework": "Go net/http"
+ },
+ "system": {
+ "hostname": "1208319f6a92",
+ "platform": "Linux",
+ "platform_version": "linux",
+ "architecture": "amd64",
+ "cpu_count": 1,
+ "python_version": "go1.25.7"
+ },
+ "runtime": {
+ "seconds": 15,
+ "human": "0 hours, 0 minutes"
+ },
+ "request": {
+ "client_ip": "172.17.0.1",
+ "user_agent": "curl/8.14.1",
+ "method": "GET",
+ "path": "/"
+ },
+ "endpoints": [
+ {
+ "path": "/",
+ "method": "GET",
+ "description": "Service information."
+ },
+ {
+ "path": "/health",
+ "method": "GET",
+ "description": "Health check endpoint."
+ }
+ ]
+}
+```
+
+```json
+$ curl -sS 127.0.0.1:5000/health | jq
+{
+ "status": "healthy",
+ "timestamp": "2026-02-10T20:03:55.538319+00:00",
+ "uptime_seconds": 104
+}
+```
+
+
+
+## Image Size Comparison and Analysis
+
+
+| Image | Image size |
+| ------------------------------------ | ------------ |
+| Builder (`lab02-go:builder`) | **85.50MiB** |
+| Final multi-stage (`lab02-go:final`) | **4.41MiB** |
+
+
+⚙️ Measuring command
+
+```bash
+docker inspect -f "{{ .Size }}" lab02-go:final | numfmt --to=iec-i --format="%.2f"
+```
+
+
+Reduction from builder to final:
+- **94.84%** smaller
+- **19.39x** smaller runtime image
+
+These metrics come from the same `docker inspect` size source, so they are directly comparable.
+
+## Why Multi-Stage Builds Matter for Compiled Languages
+
+For Go (and similarly Rust/C/C++), the compiler and build toolchain are large and needed only at build time. Multi-stage builds let us:
+
+- Keep full SDK only in builder stage.
+- Ship only the compiled binary in runtime.
+- Reduce registry transfer and startup pull time.
+- Reduce operational footprint and patch surface in production.
+
+Without multi-stage, runtime image carries unnecessary build dependencies, increasing size and risk.
+
+## Security Implications (Smaller Attack Surface)
+
+Security improvements in this implementation:
+
+- `scratch` runtime has no shell/package manager/toolchain.
+- Non-root runtime user via `USER 10001:10001`.
+- Fewer filesystem artifacts (only binary), reducing exposure.
+
+Practical impact:
+
+- Fewer components to scan/patch.
+- Lower chance of post-exploitation tooling availability inside container.
+- Simpler SBOM/runtime dependency graph.
+
+## Trade-Offs and Decisions
+
+### Decisions made
+
+- **Chose `scratch`** for maximal size/security reduction.
+- **Used static build (`CGO_ENABLED=0`)** so binary runs in empty base image.
+- **Used numeric UID:GID (`10001:10001`)** because `scratch` has no user-management tools.
+
+### Trade-offs
+
+- `scratch` is harder to debug (no shell utilities).
+- No bundled CA certs/timezone data by default (important if app adds outbound TLS or timezone-sensitive logic later).
+- Builder-stage caching is currently simple; if dependencies grow, splitting module download and source copy can improve cache efficiency further.
+
+## Summary
+
+The multi-stage approach in `app_go/Dockerfile` produces a working, non-root runtime image and achieves major size reduction compared with keeping the full Go toolchain in the final image. The result is a materially smaller and safer production artifact while preserving application functionality.
diff --git a/app_go/docs/img/lab01go.png b/app_go/docs/img/lab01go.png
new file mode 100644
index 0000000000..42652a163c
Binary files /dev/null and b/app_go/docs/img/lab01go.png differ
diff --git a/app_go/go.mod b/app_go/go.mod
new file mode 100644
index 0000000000..ed5d7b4f3e
--- /dev/null
+++ b/app_go/go.mod
@@ -0,0 +1,3 @@
+module example.com/devops-info-service
+
+go 1.25
diff --git a/app_go/main.go b/app_go/main.go
new file mode 100644
index 0000000000..194aa657fc
--- /dev/null
+++ b/app_go/main.go
@@ -0,0 +1,286 @@
+// DevOps Info Service in Go.
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "log"
+ "net"
+ "net/http"
+ "os"
+ "runtime"
+ "strings"
+ "time"
+)
+
+const (
+ serviceName = "devops-info-service"
+ serviceVersion = "1.0.0"
+ serviceDescription = "DevOps course info service"
+ serviceFramework = "Go net/http"
+)
+
+type ServiceInfo struct {
+ Name string `json:"name"`
+ Version string `json:"version"`
+ Description string `json:"description"`
+ Framework string `json:"framework"`
+}
+
+type SystemInfo struct {
+ Hostname string `json:"hostname"`
+ Platform string `json:"platform"`
+ PlatformVersion string `json:"platform_version"`
+ Architecture string `json:"architecture"`
+ CPUCount int `json:"cpu_count"`
+ PythonVersion string `json:"python_version"`
+}
+
+type UptimeInfo struct {
+ Seconds int64 `json:"seconds"`
+ Human string `json:"human"`
+}
+
+type RequestInfo struct {
+ ClientIP string `json:"client_ip"`
+ UserAgent string `json:"user_agent"`
+ Method string `json:"method"`
+ Path string `json:"path"`
+}
+
+type EndpointInfo struct {
+ Path string `json:"path"`
+ Method string `json:"method"`
+ Description string `json:"description"`
+}
+
+type RootResponse struct {
+ Service ServiceInfo `json:"service"`
+ System SystemInfo `json:"system"`
+ Runtime UptimeInfo `json:"runtime"`
+ Request RequestInfo `json:"request"`
+ Endpoints []EndpointInfo `json:"endpoints"`
+}
+
+type HealthResponse struct {
+ Status string `json:"status"`
+ Timestamp string `json:"timestamp"`
+ UptimeSeconds int64 `json:"uptime_seconds"`
+}
+
+var (
+ // startTime is used for uptime calculations.
+ startTime = time.Now().UTC()
+ // endpoints is a static list used to mirror the Python app output.
+ endpoints = []EndpointInfo{
+ {Path: "/", Method: http.MethodGet, Description: "Service information."},
+ {Path: "/health", Method: http.MethodGet, Description: "Health check endpoint."},
+ }
+)
+
+// getServiceInfo returns static service metadata.
+func getServiceInfo() ServiceInfo {
+ return ServiceInfo{
+ Name: serviceName,
+ Version: serviceVersion,
+ Description: serviceDescription,
+ Framework: serviceFramework,
+ }
+}
+
+// getSystemInfo returns host and runtime information.
+func getSystemInfo() SystemInfo {
+ hostname, err := os.Hostname()
+ if err != nil {
+ hostname = "unknown"
+ }
+
+ return SystemInfo{
+ Hostname: hostname,
+ Platform: platformName(),
+ PlatformVersion: platformVersion(),
+ Architecture: runtime.GOARCH,
+ CPUCount: runtime.NumCPU(),
+ PythonVersion: runtime.Version(),
+ }
+}
+
+// platformName maps GOOS to a human-readable name.
+func platformName() string {
+ switch runtime.GOOS {
+ case "linux":
+ return "Linux"
+ case "windows":
+ return "Windows"
+ case "darwin":
+ return "Darwin"
+ default:
+ return runtime.GOOS
+ }
+}
+
+// platformVersion attempts to return a friendly OS version.
+func platformVersion() string {
+ switch runtime.GOOS {
+ case "linux":
+ if pretty := linuxPrettyName(); pretty != "" {
+ return pretty
+ }
+ case "windows":
+ if osName := os.Getenv("OS"); osName != "" {
+ return osName
+ }
+ }
+
+ return runtime.GOOS
+}
+
+// linuxPrettyName reads PRETTY_NAME from /etc/os-release if available.
+func linuxPrettyName() string {
+ data, err := os.ReadFile("/etc/os-release")
+ if err != nil {
+ return ""
+ }
+
+ for _, line := range strings.Split(string(data), "\n") {
+ line = strings.TrimSpace(line)
+ if strings.HasPrefix(line, "PRETTY_NAME=") {
+ value := strings.TrimPrefix(line, "PRETTY_NAME=")
+ return strings.Trim(value, "\"")
+ }
+ }
+
+ return ""
+}
+
+// getUptime returns elapsed time since startTime.
+func getUptime() UptimeInfo {
+ seconds := int64(time.Since(startTime).Seconds())
+ hours := seconds / 3600
+ minutes := (seconds % 3600) / 60
+
+ return UptimeInfo{
+ Seconds: seconds,
+ Human: fmt.Sprintf("%d hours, %d minutes", hours, minutes),
+ }
+}
+
+// getRequestInfo captures minimal request metadata.
+func getRequestInfo(r *http.Request) RequestInfo {
+ return RequestInfo{
+ ClientIP: clientIP(r),
+ UserAgent: r.Header.Get("User-Agent"),
+ Method: r.Method,
+ Path: r.URL.Path,
+ }
+}
+
+// clientIP attempts to derive the client IP from proxy headers or RemoteAddr.
+func clientIP(r *http.Request) string {
+ if forwarded := r.Header.Get("X-Forwarded-For"); forwarded != "" {
+ parts := strings.Split(forwarded, ",")
+ return strings.TrimSpace(parts[0])
+ }
+
+ host, _, err := net.SplitHostPort(r.RemoteAddr)
+ if err == nil {
+ return host
+ }
+
+ return r.RemoteAddr
+}
+
+// listEndpoints returns the advertised endpoints for the root response.
+func listEndpoints() []EndpointInfo {
+ return endpoints
+}
+
+// mainHandler serves GET /.
+func mainHandler(w http.ResponseWriter, r *http.Request) {
+ payload := RootResponse{
+ Service: getServiceInfo(),
+ System: getSystemInfo(),
+ Runtime: getUptime(),
+ Request: getRequestInfo(r),
+ Endpoints: listEndpoints(),
+ }
+
+ writeJSON(w, http.StatusOK, payload)
+}
+
+// healthHandler serves GET /health.
+func healthHandler(w http.ResponseWriter, r *http.Request) {
+ payload := HealthResponse{
+ Status: "healthy",
+ Timestamp: time.Now().UTC().Format("2006-01-02T15:04:05.000000-07:00"),
+ UptimeSeconds: getUptime().Seconds,
+ }
+
+ writeJSON(w, http.StatusOK, payload)
+}
+
+// notFound returns a JSON 404.
+func notFound(w http.ResponseWriter, r *http.Request) {
+ writeJSON(w, http.StatusNotFound, map[string]string{
+ "error": "Not Found",
+ "message": "Endpoint does not exist",
+ })
+}
+
+// router dispatches requests to handlers.
+func router(w http.ResponseWriter, r *http.Request) {
+ switch {
+ case r.URL.Path == "/" && r.Method == http.MethodGet:
+ mainHandler(w, r)
+ case r.URL.Path == "/health" && r.Method == http.MethodGet:
+ healthHandler(w, r)
+ default:
+ notFound(w, r)
+ }
+}
+
+// recoverMiddleware converts panics into JSON 500 responses.
+func recoverMiddleware(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ defer func() {
+ if err := recover(); err != nil {
+ log.Printf("panic: %v", err)
+ writeJSON(w, http.StatusInternalServerError, map[string]string{
+ "error": "Internal Server Error",
+ "message": "An unexpected error occurred",
+ })
+ }
+ }()
+ log.Printf("Request: %s %s", r.Method, r.URL.Path)
+ next.ServeHTTP(w, r)
+ })
+}
+
+// writeJSON serializes a payload with the given status code.
+func writeJSON(w http.ResponseWriter, status int, payload any) {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(status)
+ if err := json.NewEncoder(w).Encode(payload); err != nil {
+ log.Printf("encode error: %v", err)
+ }
+}
+
+func main() {
+ host := os.Getenv("HOST")
+ if host == "" {
+ host = "0.0.0.0"
+ }
+
+ port := os.Getenv("PORT")
+ if port == "" {
+ port = "5000"
+ }
+
+ addr := net.JoinHostPort(host, port)
+ log.Printf("Application starting on %s", addr)
+
+ handler := recoverMiddleware(http.HandlerFunc(router))
+ if err := http.ListenAndServe(addr, handler); err != nil {
+ log.Fatalf("server error: %v", err)
+ }
+}
diff --git a/app_python/.dockerignore b/app_python/.dockerignore
new file mode 100644
index 0000000000..511a810855
--- /dev/null
+++ b/app_python/.dockerignore
@@ -0,0 +1,4 @@
+*
+!src/**
+!pyproject.toml
+!poetry.lock
diff --git a/app_python/.flake8 b/app_python/.flake8
new file mode 100644
index 0000000000..63c477b455
--- /dev/null
+++ b/app_python/.flake8
@@ -0,0 +1,4 @@
+[flake8]
+max-line-length = 100
+max-complexity = 10
+exclude = .*,docs,*/__pycache__
diff --git a/app_python/.gitattributes b/app_python/.gitattributes
new file mode 100644
index 0000000000..e48505a0fe
--- /dev/null
+++ b/app_python/.gitattributes
@@ -0,0 +1,30 @@
+# https://github.com/gitattributes/gitattributes/blob/fddc586cf0f10ec4485028d0d2dd6f73197a4258/Python.gitattributes
+# Basic .gitattributes for a python repo.
+
+# Source files
+# ============
+*.pxd text diff=python
+*.py text diff=python
+*.py3 text diff=python
+*.pyw text diff=python
+*.pyx text diff=python
+*.pyz text diff=python
+*.pyi text diff=python
+
+# Binary files
+# ============
+*.db binary
+*.p binary
+*.pkl binary
+*.pickle binary
+*.pyc binary export-ignore
+*.pyo binary export-ignore
+*.pyd binary
+
+# Jupyter notebook
+*.ipynb text eol=lf
+
+# Note: .db, .p, and .pkl files are associated
+# with the python modules ``pickle``, ``dbm.*``,
+# ``shelve``, ``marshal``, ``anydbm``, & ``bsddb``
+# (among others).
diff --git a/app_python/.gitignore b/app_python/.gitignore
new file mode 100644
index 0000000000..c33866fe47
--- /dev/null
+++ b/app_python/.gitignore
@@ -0,0 +1,217 @@
+# https://github.com/github/gitignore/blob/53fee13f20a05efc93ef4edcad0c62863520e268/Python.gitignore
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[codz]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py.cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+# Pipfile.lock
+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# uv.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+# poetry.lock
+# poetry.toml
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+# pdm.lock
+# pdm.toml
+.pdm-python
+.pdm-build/
+
+# pixi
+# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+# pixi.lock
+# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+# in the .venv directory. It is recommended not to include this directory in version control.
+.pixi
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# Redis
+*.rdb
+*.aof
+*.pid
+
+# RabbitMQ
+mnesia/
+rabbitmq/
+rabbitmq-data/
+
+# ActiveMQ
+activemq-data/
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.envrc
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+.idea/
+
+# Abstra
+# Abstra is an AI-powered process automation framework.
+# Ignore directories containing user credentials, local state, and settings.
+# Learn more at https://abstra.io/docs
+.abstra/
+
+# Visual Studio Code
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
+# you could uncomment the following to ignore the entire vscode folder
+.vscode/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
+
+# Marimo
+marimo/_static/
+marimo/_lsp/
+__marimo__/
+
+# Streamlit
+.streamlit/secrets.toml
\ No newline at end of file
diff --git a/app_python/Dockerfile b/app_python/Dockerfile
new file mode 100644
index 0000000000..e9b02ed024
--- /dev/null
+++ b/app_python/Dockerfile
@@ -0,0 +1,23 @@
+# Runtime image for the Flask service. The version tag is pinned for
+# reproducible builds; the Alpine variant keeps the image small.
+FROM python:3.14-alpine
+
+# No .pyc files; unbuffered stdout/stderr so container logs appear immediately.
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV POETRY_VERSION=2.3.2
+
+# Install a pinned Poetry and create an unprivileged user/group to run the app.
+RUN pip install --no-cache-dir "poetry==$POETRY_VERSION" \
+ && addgroup appgroup \
+ && adduser --disabled-password --gecos "" --no-create-home -s /bin/sh appuser -G appgroup
+
+WORKDIR /app
+
+# Copy only the dependency manifests first so this install layer stays cached
+# until pyproject.toml/poetry.lock change. Install straight into the system
+# site-packages (no venv) and skip installing the project itself (--no-root).
+COPY pyproject.toml poetry.lock ./
+RUN poetry config virtualenvs.create false \
+ && poetry install --only main --no-interaction --no-ansi --no-root
+
+# Application code last: code-only changes don't invalidate the dependency layer.
+COPY src ./src
+
+# Default bind address/port; overridable with `docker run -e HOST=... -e PORT=...`.
+ENV PORT=5000
+ENV HOST="0.0.0.0"
+
+# Drop root before starting. CMD uses `sh -c` so the ${HOST}/${PORT}
+# environment variables are expanded at container start.
+USER appuser
+CMD ["sh", "-c", "gunicorn --bind ${HOST:-0.0.0.0}:${PORT:-5000} src.flask_instance:app"]
diff --git a/app_python/README.md b/app_python/README.md
new file mode 100644
index 0000000000..1979c45e82
--- /dev/null
+++ b/app_python/README.md
@@ -0,0 +1,85 @@
+# DevOps Info Service
+
+[](https://github.com/LocalT0aster/DevOps-Core-S26/actions/workflows/python-ci.yml)
+
+## Overview
+
+Small Flask web service that reports service metadata, system information, runtime uptime, and basic request details. Includes a simple health check endpoint for monitoring.
+
+## Prerequisites
+
+- Python 3.13+
+- Poetry
+
+## Installation
+
+```bash
+poetry install
+```
+
+### Docker
+
+- Pull the container:
+ ```bash
+ docker pull localt0aster/devops-app-py
+ ```
+- OR build the container yourself:
+ ```bash
+ docker build -t localt0aster/devops-app-py .
+ ```
+ The Docker build installs dependencies with:
+ ```bash
+ poetry install --only main --no-root
+ ```
+
+## Running the Application
+
+Production-style local run with Gunicorn:
+
+```bash
+poetry run gunicorn --bind 0.0.0.0:5000 src.flask_instance:app
+# Or with custom config
+HOST=127.0.0.1 PORT=8080 poetry run gunicorn --bind 127.0.0.1:8080 src.flask_instance:app
+```
+
+### Docker
+
+- Run the container:
+ ```bash
+ docker run -p 5000:5000 -e HOST="0.0.0.0" -d localt0aster/devops-app-py
+ ```
+
+## API Endpoints
+
+- `GET /` - Service and system information
+- `GET /health` - Health check
+
+## Configuration
+
+| Variable | Default | Description |
+| -------- | --------- | ---------------------------------------- |
+| `HOST` | `0.0.0.0` | Bind address for the server |
+| `PORT` | `5000` | Port to listen on |
+| `DEBUG` | `False` | Enable Flask debug mode (`true`/`false`) |
+
+## Testing
+
+The project uses `pytest` for unit tests.
+
+```bash
+poetry install --with dev
+poetry run pytest --cov=src --cov-report=term-missing
+```
+
+## Linting
+
+```bash
+poetry run flake8 src tests
+```
+
+Current test coverage includes:
+
+- `GET /` successful response schema and types
+- `GET /health` successful response schema and types
+- `404` JSON error handling for unknown routes
+- `500` JSON error handling for simulated internal failures
diff --git a/app_python/docs/LAB01.md b/app_python/docs/LAB01.md
new file mode 100644
index 0000000000..07894b7146
--- /dev/null
+++ b/app_python/docs/LAB01.md
@@ -0,0 +1,191 @@
+# LAB01 - DevOps Info Service (Python)
+
+## Framework Selection
+
+**Choice:** Flask
+**Why:** I did not know any Python web framework for APIs, and Flask felt simplest to start with. Most lab examples were in Flask, so it reduced friction.
+
+**Comparison (concise):**
+
+| Framework | Pros | Cons | Fit for this lab |
+| -------------- | ------------------------------------ | ------------------------------ | ----------------------------------------- |
+| Flask (chosen) | Minimal, easy to learn, flexible | Fewer batteries included | Best for a small info service |
+| FastAPI | Great typing, auto docs, async ready | Slightly more concepts upfront | Good, but extra overhead for a simple lab |
+| Django | Full stack, ORM, auth | Heavy for a tiny API | Overkill here |
+
+## Best Practices Applied
+
+Below are concrete examples from `app.py` and why they matter.
+
+1. **Configuration via env vars** - makes the service configurable without code changes.
+
+```python
+HOST = os.getenv("HOST", "0.0.0.0")
+PORT = int(os.getenv("PORT", 5000))
+DEBUG = os.getenv("DEBUG", "False").lower() == "true"
+```
+
+2. **Clear separation into helper functions** - keeps endpoints small and readable.
+
+```python
+def get_system_info() -> dict[str, str | int]:
+ ...
+
+def get_uptime():
+ ...
+```
+
+3. **Logging** - provides startup and request diagnostics.
+
+```python
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
+logger.info("Application starting...")
+```
+
+4. **Error handling** - consistent JSON errors for clients.
+
+```python
+@app.errorhandler(404)
+def not_found(error):
+ return jsonify({"error": "Not Found", "message": "Endpoint does not exist"}), 404
+```
+
+## API Documentation
+
+### Endpoints
+
+- `GET /` - service + system + runtime + request info
+- `GET /health` - health check
+
+### Example Requests
+
+```bash
+curl -sS http://127.0.0.1:5000/ | jq
+curl -sS http://127.0.0.1:5000/health | jq
+```
+
+### Example Responses
+
+`GET /`:
+
+```json
+{
+ "endpoints": [
+ {
+ "description": "Service information",
+ "method": "GET",
+ "path": "/"
+ },
+ {
+ "description": "Health check",
+ "method": "GET",
+ "path": "/health"
+ }
+ ],
+ "request": {
+ "client_ip": "127.0.0.1",
+ "method": "GET",
+ "path": "/",
+ "user_agent": "curl/8.18.0"
+ },
+ "runtime": {
+ "human": "0 hours, 6 minutes",
+ "seconds": 418
+ },
+ "service": {
+ "description": "DevOps course info service",
+ "framework": "Flask",
+ "name": "devops-info-service",
+ "version": "1.0.0"
+ },
+ "system": {
+ "architecture": "x86_64",
+ "cpu_count": 8,
+ "hostname": "aSUS-sTUFf-arch",
+ "platform": "Linux",
+ "platform_version": "Arch Linux",
+ "python_version": "3.14.2"
+ }
+}
+```
+
+`GET /health`:
+
+```json
+{
+ "status": "healthy",
+ "timestamp": "2026-01-28T20:04:59.234201+00:00",
+ "uptime_seconds": 426
+}
+```
+
+## Testing Evidence
+
+### Screenshot
+
+
+
+### Output
+
+```js
+$ curl -sS 127.0.0.1:3926 | jq
+{
+ "endpoints": [
+ {
+ "description": "Service information",
+ "method": "GET",
+ "path": "/"
+ },
+ {
+ "description": "Health check",
+ "method": "GET",
+ "path": "/health"
+ }
+ ],
+ "request": {
+ "client_ip": "127.0.0.1",
+ "method": "GET",
+ "path": "/",
+ "user_agent": "curl/8.18.0"
+ },
+ "runtime": {
+ "human": "0 hours, 6 minutes",
+ "seconds": 418
+ },
+ "service": {
+ "description": "DevOps course info service",
+ "framework": "Flask",
+ "name": "devops-info-service",
+ "version": "1.0.0"
+ },
+ "system": {
+ "architecture": "x86_64",
+ "cpu_count": 8,
+ "hostname": "aSUS-sTUFf-arch",
+ "platform": "Linux",
+ "platform_version": "Arch Linux",
+ "python_version": "3.14.2"
+ }
+}
+```
+
+```js
+$ curl -sS 127.0.0.1:3926/health | jq
+{
+ "status": "healthy",
+ "timestamp": "2026-01-28T20:04:59.234201+00:00",
+ "uptime_seconds": 426
+}
+```
+
+## Challenges & Solutions
+
+1. **Framework choice** - I went with Flask because I did not know any Python API framework and Flask looked simplest; the lab examples were already in Flask.
+2. **Listing endpoints dynamically** - I struggled with Flask's routing introspection; a StackOverflow snippet didn't work, and ChatGPT helped me craft a working approach.
+
+## GitHub Community
+
+Starring repositories helps me bookmark useful projects and signal appreciation, which improves visibility in open source. Following developers keeps me aware of what classmates and maintainers are doing, supporting collaboration and professional growth.
diff --git a/app_python/docs/LAB02.md b/app_python/docs/LAB02.md
new file mode 100644
index 0000000000..e650b5a669
--- /dev/null
+++ b/app_python/docs/LAB02.md
@@ -0,0 +1,288 @@
+# LAB02 - Docker Containerization (Python)
+
+## Docker Best Practices Applied
+
+1. **Pinned base image version** - guarantees repeatable builds and avoids unexpected upstream changes.
+
+```Dockerfile
+FROM python:3.14-alpine
+```
+
+2. **Non-root user** - reduces blast radius if the app is compromised.
+
+```Dockerfile
+RUN addgroup appgroup && adduser --disabled-password --gecos "" --no-create-home -s /bin/sh appuser -G appgroup
+USER appuser
+```
+
+3. **Layer caching for dependencies** - installing requirements before copying the full app keeps rebuilds fast when only code changes.
+
+```Dockerfile
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+COPY . .
+```
+
+4. **Minimal build context via .dockerignore** - avoids sending unrelated files (venv, git, docs) to the build context.
+
+```dockerignore
+*
+!app.py
+!requirements.txt
+!tests/*
+```
+
+5. **No pip cache** - prevents leaving package download caches in the image.
+
+```Dockerfile
+RUN pip install --no-cache-dir -r requirements.txt
+```
+
+6. **Explicit workdir** - ensures all app files live under a single predictable path.
+
+```Dockerfile
+WORKDIR /app
+```
+
+## Image Information & Decisions
+
+**Base image chosen:** `python:3.14-alpine`
+
+**Why:**
+
+- Pinned Python version for reproducibility.
+- Alpine variant keeps the runtime small and reduces attack surface.
+- The app is pure-Python, so musl vs glibc compatibility is not an issue here.
+
+**Final image size:**
+
+```bash
+$ docker inspect -f "{{ .Size }}" localt0aster/devops-app-py:lab.2 | numfmt --to=iec-i --format="%.2f"
+22.82Mi
+```
+
+**Layer structure (top to bottom):**
+
+- Base image: Python runtime on Alpine.
+- User/group creation: creates `appuser` and drops root privileges.
+- Workdir: standardizes file locations.
+- Dependency layer: copy `requirements.txt`, then install dependencies.
+- App layer: copy remaining files into `/app`.
+- Cleanup: remove `requirements.txt` (runtime tidiness).
+- Runtime config: set `HOST` and `PORT` env vars.
+- Switch to non-root user and start app.
+
+**Optimization choices:**
+
+- Used Alpine for smaller base image.
+- Copied `requirements.txt` separately to maximize build cache hits.
+- Used `pip --no-cache-dir` to avoid cached wheel files.
+- `.dockerignore` reduces context size and speeds up builds.
+- Note: `RUN rm requirements.txt` in a separate layer does not reduce image size; it only removes it from the final filesystem view.
+
+## Build & Run Process
+
+
+🔨 Build output
+
+```log
+$ docker build --no-cache --progress=plain -t localt0aster/devops-app-py .
+#0 building with "default" instance using docker driver
+
+#1 [internal] load build definition from Dockerfile
+#1 transferring dockerfile: 369B done
+#1 DONE 0.0s
+
+#2 [internal] load metadata for docker.io/library/python:3.14-alpine
+#2 DONE 0.5s
+
+#3 [internal] load .dockerignore
+#3 transferring context: 77B done
+#3 DONE 0.0s
+
+#4 [1/7] FROM docker.io/library/python:3.14-alpine@sha256:faee120f7885a06fcc9677922331391fa690d911c020abb9e8025ff3d908e510
+#4 resolve docker.io/library/python:3.14-alpine@sha256:faee120f7885a06fcc9677922331391fa690d911c020abb9e8025ff3d908e510 0.0s done
+#4 CACHED
+
+#5 [internal] load build context
+#5 transferring context: 123B done
+#5 DONE 0.0s
+
+#6 [2/7] RUN addgroup appgroup && adduser --disabled-password --gecos "" --no-create-home -s /bin/sh appuser -G appgroup
+#6 DONE 0.3s
+
+#7 [3/7] WORKDIR /app
+#7 DONE 0.1s
+
+#8 [4/7] COPY requirements.txt .
+#8 DONE 0.1s
+
+#9 [5/7] RUN pip install --no-cache-dir -r requirements.txt
+#9 4.545 Collecting blinker==1.9.0 (from -r requirements.txt (line 1))
+#9 4.736 Downloading blinker-1.9.0-py3-none-any.whl.metadata (1.6 kB)
+#9 4.814 Collecting certifi==2026.1.4 (from -r requirements.txt (line 2))
+#9 4.855 Downloading certifi-2026.1.4-py3-none-any.whl.metadata (2.5 kB)
+#9 5.098 Collecting charset-normalizer==3.4.4 (from -r requirements.txt (line 3))
+#9 5.140 Downloading charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl.metadata (37 kB)
+#9 5.229 Collecting click==8.3.1 (from -r requirements.txt (line 4))
+#9 5.270 Downloading click-8.3.1-py3-none-any.whl.metadata (2.6 kB)
+#9 5.417 Collecting Flask==3.1.2 (from -r requirements.txt (line 5))
+#9 5.458 Downloading flask-3.1.2-py3-none-any.whl.metadata (3.2 kB)
+#9 5.517 Collecting idna==3.11 (from -r requirements.txt (line 6))
+#9 5.560 Downloading idna-3.11-py3-none-any.whl.metadata (8.4 kB)
+#9 5.608 Collecting itsdangerous==2.2.0 (from -r requirements.txt (line 7))
+#9 5.648 Downloading itsdangerous-2.2.0-py3-none-any.whl.metadata (1.9 kB)
+#9 5.711 Collecting Jinja2==3.1.6 (from -r requirements.txt (line 8))
+#9 5.752 Downloading jinja2-3.1.6-py3-none-any.whl.metadata (2.9 kB)
+#9 5.877 Collecting MarkupSafe==3.0.3 (from -r requirements.txt (line 9))
+#9 5.928 Downloading markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl.metadata (2.7 kB)
+#9 6.003 Collecting requests==2.32.5 (from -r requirements.txt (line 10))
+#9 6.048 Downloading requests-2.32.5-py3-none-any.whl.metadata (4.9 kB)
+#9 6.120 Collecting urllib3==2.6.3 (from -r requirements.txt (line 11))
+#9 6.160 Downloading urllib3-2.6.3-py3-none-any.whl.metadata (6.9 kB)
+#9 6.251 Collecting Werkzeug==3.1.5 (from -r requirements.txt (line 12))
+#9 6.296 Downloading werkzeug-3.1.5-py3-none-any.whl.metadata (4.0 kB)
+#9 6.399 Downloading blinker-1.9.0-py3-none-any.whl (8.5 kB)
+#9 6.440 Downloading certifi-2026.1.4-py3-none-any.whl (152 kB)
+#9 6.530 Downloading charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl (154 kB)
+#9 6.577 Downloading click-8.3.1-py3-none-any.whl (108 kB)
+#9 6.620 Downloading flask-3.1.2-py3-none-any.whl (103 kB)
+#9 6.662 Downloading idna-3.11-py3-none-any.whl (71 kB)
+#9 6.704 Downloading itsdangerous-2.2.0-py3-none-any.whl (16 kB)
+#9 6.743 Downloading jinja2-3.1.6-py3-none-any.whl (134 kB)
+#9 6.792 Downloading markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl (23 kB)
+#9 6.831 Downloading requests-2.32.5-py3-none-any.whl (64 kB)
+#9 6.871 Downloading urllib3-2.6.3-py3-none-any.whl (131 kB)
+#9 6.915 Downloading werkzeug-3.1.5-py3-none-any.whl (225 kB)
+#9 6.985 Installing collected packages: urllib3, MarkupSafe, itsdangerous, idna, click, charset-normalizer, certifi, blinker, Werkzeug, requests, Jinja2, Flask
+#9 8.701
+#9 8.709 Successfully installed Flask-3.1.2 Jinja2-3.1.6 MarkupSafe-3.0.3 Werkzeug-3.1.5 blinker-1.9.0 certifi-2026.1.4 charset-normalizer-3.4.4 click-8.3.1 idna-3.11 itsdangerous-2.2.0 requests-2.32.5 urllib3-2.6.3
+#9 8.710 WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager, possibly rendering your system unusable. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv. Use the --root-user-action option if you know what you are doing and want to suppress this warning.
+#9 9.000
+#9 9.000 [notice] A new release of pip is available: 25.3 -> 26.0.1
+#9 9.000 [notice] To update, run: pip install --upgrade pip
+#9 DONE 9.4s
+
+#10 [6/7] COPY . .
+#10 DONE 0.1s
+
+#11 [7/7] RUN rm requirements.txt
+#11 DONE 0.3s
+
+#12 exporting to image
+#12 exporting layers
+#12 exporting layers 1.4s done
+#12 exporting manifest sha256:0e08d9c814e82ba9bfc64ab9bffca15d59c52f63d1b9db264e10723bf23c2daf
+#12 exporting manifest sha256:0e08d9c814e82ba9bfc64ab9bffca15d59c52f63d1b9db264e10723bf23c2daf 0.0s done
+#12 exporting config sha256:89b3883bbcb401b8bc8aa815aef1cde31083c25f245921185ce4acae286a51fb 0.0s done
+#12 exporting attestation manifest sha256:fbcf722602c9bb0c149874e7052029e55cebf067e88f7448a0282f3b3fb1b926 0.0s done
+#12 exporting manifest list sha256:24ce3d2f1f6270cedba6257c73fd1b5105b821025b9e38f87798ca75fba493d7
+#12 exporting manifest list sha256:24ce3d2f1f6270cedba6257c73fd1b5105b821025b9e38f87798ca75fba493d7 0.0s done
+#12 naming to docker.io/localt0aster/devops-app-py:latest done
+#12 unpacking to docker.io/localt0aster/devops-app-py:latest
+#12 unpacking to docker.io/localt0aster/devops-app-py:latest 0.5s done
+#12 DONE 2.1s
+
+ 1 warning found (use docker --debug to expand):
+ - CopyIgnoredFile: Attempting to Copy file "." that is excluded by .dockerignore (line 6)
+```
+
+
+
+
+🏃 Run output
+
+```log
+$ docker run -p 5000:5000 --rm localt0aster/devops-app-py
+2026-02-10 16:52:32,232 - __main__ - INFO - Application starting...
+ * Serving Flask app 'app'
+ * Debug mode: off
+2026-02-10 16:52:32,238 - werkzeug - INFO - WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
+ * Running on all addresses (0.0.0.0)
+ * Running on http://127.0.0.1:5000
+ * Running on http://172.17.0.2:5000
+2026-02-10 16:52:32,239 - werkzeug - INFO - Press CTRL+C to quit
+2026-02-10 16:53:49,498 - werkzeug - INFO - 172.17.0.1 - - [10/Feb/2026 16:53:49] "GET / HTTP/1.1" 200 -
+```
+
+
+
+
+🛰️ Endpoint test output
+
+```json
+$ curl -Ss 127.0.0.1:5000 | jq
+{
+ "endpoints": [
+ {
+ "description": "Service information",
+ "method": "GET",
+ "path": "/"
+ },
+ {
+ "description": "Health check",
+ "method": "GET",
+ "path": "/health"
+ }
+ ],
+ "request": {
+ "client_ip": "172.17.0.1",
+ "method": "GET",
+ "path": "/",
+ "user_agent": "curl/8.14.1"
+ },
+ "runtime": {
+ "human": "0 hours, 1 minutes",
+ "seconds": 86
+ },
+ "service": {
+ "description": "DevOps course info service",
+ "framework": "Flask",
+ "name": "devops-info-service",
+ "version": "1.0.0"
+ },
+ "system": {
+ "architecture": "x86_64",
+ "cpu_count": 1,
+ "hostname": "bd45062076fd",
+ "platform": "Linux",
+ "platform_version": "Alpine Linux v3.23",
+ "python_version": "3.14.3"
+ }
+}
+```
+
+
+
+Docker Hub repository URL:
+
+## Technical Analysis
+
+**Why this Dockerfile works:**
+
+- Dependencies are installed before application code, enabling Docker cache reuse.
+- Environment variables set defaults that match the Flask app's config.
+- The `CMD` runs the app with `python app.py`, which is the same startup command as local development.
+- `USER appuser` prevents the Flask process from running as root.
+
+**What happens if you change the layer order:**
+
+- If `COPY . .` comes before `pip install`, any code change will invalidate the cache and force a full dependency reinstall.
+- If you install dependencies after copying everything, small edits trigger slower rebuilds.
+
+**Security considerations implemented:**
+
+- Non-root user for runtime.
+- Minimal base image reduces available tooling and attack surface.
+- Pinned base image version reduces supply-chain drift.
+
+**How .dockerignore improves the build:**
+
+- Less data sent to the Docker daemon means faster builds.
+- Prevents accidental inclusion of venvs, git history, and local artifacts.
+
+## Challenges & Solutions
+
+- **Debian to Alpine command differences.**
+ - Issue: `python:3.14-slim` (Debian) lab examples use `useradd/groupadd`, which don't exist in `python:3.14-alpine`.
+ - Fix: use `addgroup` & `adduser`
diff --git a/app_python/docs/LAB03.md b/app_python/docs/LAB03.md
new file mode 100644
index 0000000000..5f12e800e1
--- /dev/null
+++ b/app_python/docs/LAB03.md
@@ -0,0 +1,124 @@
+# LAB03 - Continuous Integration (Python)
+
+## 1. Overview
+
+**Testing framework used:** `pytest`
+
+**Why this choice:**
+
+- concise assertions and clear failure output
+- fixtures simplify Flask test-client setup
+- `monkeypatch` enables controlled error-path testing
+
+**What is covered by tests:**
+
+- endpoint tests for `GET /` and `GET /health` (success + error behavior)
+- JSON schema/type assertions
+- helper/unit tests for runtime/platform/request metadata
+- entrypoint behavior test for `main.run()` argument wiring
+
+**Current CI trigger configuration:**
+
+- workflow files:
+ - `.github/workflows/python-ci.yml` (lint + tests + coverage reports)
+ - `.github/workflows/python-snyk.yml` (security scan)
+ - `.github/workflows/python-docker.yml` (container publish)
+- note: Docker login/build/push is intentionally separated into `python-docker.yml` rather than embedded in `python-ci.yml`.
+- triggers:
+ - CI/Snyk: `push` + `pull_request` with path filters
+ - Docker publish:
+ - branch pushes to `lab*` publish `1.<lab>.<sha>`
+ - merged PRs to `master` publish `1.<lab>` + `latest`
+
+**Versioning strategy (SemVer/CalVer):**
+
+- SemVer-style lab release tags: `1.<lab>` + `latest`
+- lab number is extracted from merged branch name (example: `lab03` -> `1.3`)
+
+## 2. Workflow Evidence
+
+Provide links/terminal output for:
+
+- Tests passing locally (terminal output below)
+- Successful workflow run links (GitHub Actions):
+ - Python CI:
+ - Python Docker Publish:
+ - Python Snyk Scan:
+- Docker image on Docker Hub (links):
+ - Tags page:
+ - Example pushed tag (`1.3.d4ae1ce`):
+- Status badge in `app_python/README.md`:
+ -
+
+
+pytest output log
+
+```log
+$ poetry run pytest --cov=src --cov-report=term-missing
+========================= test session starts =========================
+platform linux -- Python 3.14.2, pytest-9.0.2, pluggy-1.6.0
+rootdir: /home/t0ast/Repos/DevOps-Core-S26/app_python
+configfile: pyproject.toml
+plugins: anyio-4.12.1, mock-3.15.1, cov-7.0.0
+collected 10 items
+
+tests/test_endpoints.py ..... [ 50%]
+tests/test_unit_helpers.py ..... [100%]
+
+=========================== tests coverage ============================
+___________ coverage: platform linux, python 3.14.2-final-0 ___________
+
+Name Stmts Miss Cover Missing
+-----------------------------------------------------
+src/flask_instance.py 7 0 100%
+src/main.py 10 0 100%
+src/router.py 60 0 100%
+-----------------------------------------------------
+TOTAL 77 0 100%
+========================= 10 passed in 0.06s ==========================
+```
+
+
+
+## 3. Best Practices Implemented
+
+- **Practice 1: Path-based trigger filtering**: avoids running Python CI when unrelated folders change.
+- **Practice 2: Lint + test stages in CI**: catches style and functional issues early.
+- **Practice 3: Coverage reporting in CI command**: makes test quality visible, not just pass/fail.
+- **Practice 4: Pipeline separation by concern**: test, security, and deploy concerns run independently for clearer failure diagnosis.
+- **Practice 5: Reusable setup action**: shared Python/Poetry setup is centralized in `.github/actions/python-setup/action.yml` to avoid duplication.
+- **Caching**: `actions/cache` stores `~/.cache/pypoetry` and `app_python/.venv` using a `poetry.lock`-based key.
+- **Snyk**: integrated via `snyk/actions/setup` + `snyk test --severity-threshold=high`.
+- **Snyk token handling**: workflow skips Snyk step if `SNYK_TOKEN` secret is missing.
+
+
+Snyk result (run #21961075835)
+
+```
+Testing /home/runner/work/DevOps-Core-S26/DevOps-Core-S26/app_python...
+
+Organization: localt0aster
+Package manager: poetry
+Target file: pyproject.toml
+Project name: devops-info-service
+Open source: no
+Project path: /home/runner/work/DevOps-Core-S26/DevOps-Core-S26/app_python
+Licenses: enabled
+
+✔ Tested 15 dependencies for known issues, no vulnerable paths found.
+```
+
+
+
+## 4. Key Decisions
+
+- **Versioning Strategy:** SemVer-style `1.<lab>` because releases happen once per lab and are easy to map back to coursework milestones.
+- **Docker Tags:** branch builds publish `1.<lab>.<sha>`; merged lab releases publish `1.<lab>` and `latest`.
+- **Workflow Triggers:** path-filtered pushes/PRs for CI and Snyk, with container publishing gated on merged PRs to `master`.
+- **Test Coverage:** endpoint and helper logic are covered; launcher-only code is excluded with pragma.
+- **Snyk policy:** CI fails only for vulnerabilities at `high` severity or above.
+
+## 5. Challenges (Optional)
+
+- Moving from endpoint-only tests to helper-level unit tests increased meaningful coverage.
+- Local and CI environments may have different tool availability; Poetry-based commands are used for reproducibility.
diff --git a/app_python/docs/img/lab01.png b/app_python/docs/img/lab01.png
new file mode 100644
index 0000000000..5316f2f18a
Binary files /dev/null and b/app_python/docs/img/lab01.png differ
diff --git a/app_python/poetry.lock b/app_python/poetry.lock
new file mode 100644
index 0000000000..e4918ed237
--- /dev/null
+++ b/app_python/poetry.lock
@@ -0,0 +1,714 @@
+# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.
+
+[[package]]
+name = "blinker"
+version = "1.9.0"
+description = "Fast, simple object-to-object and broadcast signaling"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"},
+ {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"},
+]
+
+[[package]]
+name = "certifi"
+version = "2026.1.4"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"},
+ {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"},
+ {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"},
+ {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"},
+ {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"},
+ {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"},
+ {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"},
+ {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"},
+ {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"},
+ {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"},
+ {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"},
+]
+
+[[package]]
+name = "click"
+version = "8.3.1"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"},
+ {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main", "dev"]
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""}
+
+[[package]]
+name = "coverage"
+version = "7.13.4"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.10"
+groups = ["dev"]
+files = [
+ {file = "coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415"},
+ {file = "coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b"},
+ {file = "coverage-7.13.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e24f9156097ff9dc286f2f913df3a7f63c0e333dcafa3c196f2c18b4175ca09a"},
+ {file = "coverage-7.13.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8041b6c5bfdc03257666e9881d33b1abc88daccaf73f7b6340fb7946655cd10f"},
+ {file = "coverage-7.13.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a09cfa6a5862bc2fc6ca7c3def5b2926194a56b8ab78ffcf617d28911123012"},
+ {file = "coverage-7.13.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:296f8b0af861d3970c2a4d8c91d48eb4dd4771bcef9baedec6a9b515d7de3def"},
+ {file = "coverage-7.13.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e101609bcbbfb04605ea1027b10dc3735c094d12d40826a60f897b98b1c30256"},
+ {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aa3feb8db2e87ff5e6d00d7e1480ae241876286691265657b500886c98f38bda"},
+ {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4fc7fa81bbaf5a02801b65346c8b3e657f1d93763e58c0abdf7c992addd81a92"},
+ {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:33901f604424145c6e9c2398684b92e176c0b12df77d52db81c20abd48c3794c"},
+ {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:bb28c0f2cf2782508a40cec377935829d5fcc3ad9a3681375af4e84eb34b6b58"},
+ {file = "coverage-7.13.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d107aff57a83222ddbd8d9ee705ede2af2cc926608b57abed8ef96b50b7e8f9"},
+ {file = "coverage-7.13.4-cp310-cp310-win32.whl", hash = "sha256:a6f94a7d00eb18f1b6d403c91a88fd58cfc92d4b16080dfdb774afc8294469bf"},
+ {file = "coverage-7.13.4-cp310-cp310-win_amd64.whl", hash = "sha256:2cb0f1e000ebc419632bbe04366a8990b6e32c4e0b51543a6484ffe15eaeda95"},
+ {file = "coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053"},
+ {file = "coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11"},
+ {file = "coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa"},
+ {file = "coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7"},
+ {file = "coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00"},
+ {file = "coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef"},
+ {file = "coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903"},
+ {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f"},
+ {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299"},
+ {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505"},
+ {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6"},
+ {file = "coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9"},
+ {file = "coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9"},
+ {file = "coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f"},
+ {file = "coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f"},
+ {file = "coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459"},
+ {file = "coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3"},
+ {file = "coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634"},
+ {file = "coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3"},
+ {file = "coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa"},
+ {file = "coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3"},
+ {file = "coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a"},
+ {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7"},
+ {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc"},
+ {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47"},
+ {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985"},
+ {file = "coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0"},
+ {file = "coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246"},
+ {file = "coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126"},
+ {file = "coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d"},
+ {file = "coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9"},
+ {file = "coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac"},
+ {file = "coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea"},
+ {file = "coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b"},
+ {file = "coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525"},
+ {file = "coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242"},
+ {file = "coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148"},
+ {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a"},
+ {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23"},
+ {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80"},
+ {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea"},
+ {file = "coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a"},
+ {file = "coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d"},
+ {file = "coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd"},
+ {file = "coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af"},
+ {file = "coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d"},
+ {file = "coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12"},
+ {file = "coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b"},
+ {file = "coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9"},
+ {file = "coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092"},
+ {file = "coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9"},
+ {file = "coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26"},
+ {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2"},
+ {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940"},
+ {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c"},
+ {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0"},
+ {file = "coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b"},
+ {file = "coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9"},
+ {file = "coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd"},
+ {file = "coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997"},
+ {file = "coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601"},
+ {file = "coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689"},
+ {file = "coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c"},
+ {file = "coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129"},
+ {file = "coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552"},
+ {file = "coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a"},
+ {file = "coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356"},
+ {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71"},
+ {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5"},
+ {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98"},
+ {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5"},
+ {file = "coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0"},
+ {file = "coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb"},
+ {file = "coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505"},
+ {file = "coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2"},
+ {file = "coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056"},
+ {file = "coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc"},
+ {file = "coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9"},
+ {file = "coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf"},
+ {file = "coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55"},
+ {file = "coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72"},
+ {file = "coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a"},
+ {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6"},
+ {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3"},
+ {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750"},
+ {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39"},
+ {file = "coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0"},
+ {file = "coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea"},
+ {file = "coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932"},
+ {file = "coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b"},
+ {file = "coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0"},
+ {file = "coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91"},
+]
+
+[package.extras]
+toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
+
+[[package]]
+name = "flake8"
+version = "7.3.0"
+description = "the modular source code checker: pep8 pyflakes and co"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"},
+ {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"},
+]
+
+[package.dependencies]
+mccabe = ">=0.7.0,<0.8.0"
+pycodestyle = ">=2.14.0,<2.15.0"
+pyflakes = ">=3.4.0,<3.5.0"
+
+[[package]]
+name = "flask"
+version = "3.1.2"
+description = "A simple framework for building complex web applications."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c"},
+ {file = "flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87"},
+]
+
+[package.dependencies]
+blinker = ">=1.9.0"
+click = ">=8.1.3"
+itsdangerous = ">=2.2.0"
+jinja2 = ">=3.1.2"
+markupsafe = ">=2.1.1"
+werkzeug = ">=3.1.0"
+
+[package.extras]
+async = ["asgiref (>=3.2)"]
+dotenv = ["python-dotenv"]
+
+[[package]]
+name = "gunicorn"
+version = "25.0.3"
+description = "WSGI HTTP Server for UNIX"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "gunicorn-25.0.3-py3-none-any.whl", hash = "sha256:aca364c096c81ca11acd4cede0aaeea91ba76ca74e2c0d7f879154db9d890f35"},
+ {file = "gunicorn-25.0.3.tar.gz", hash = "sha256:b53a7fff1a07b825b962af320554de44ae77a26abfa373711ff3f83d57d3506d"},
+]
+
+[package.dependencies]
+packaging = "*"
+
+[package.extras]
+eventlet = ["eventlet (>=0.40.3)"]
+gevent = ["gevent (>=24.10.1)"]
+http2 = ["h2 (>=4.1.0)"]
+setproctitle = ["setproctitle"]
+testing = ["coverage", "eventlet (>=0.40.3)", "gevent (>=24.10.1)", "h2 (>=4.1.0)", "httpx[http2]", "pytest", "pytest-asyncio", "pytest-cov", "uvloop (>=0.19.0)"]
+tornado = ["tornado (>=6.5.0)"]
+
+[[package]]
+name = "idna"
+version = "3.11"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"},
+ {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"},
+]
+
+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.10"
+groups = ["dev"]
+files = [
+ {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"},
+ {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"},
+]
+
+[[package]]
+name = "itsdangerous"
+version = "2.2.0"
+description = "Safely pass data to untrusted environments and back."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
+ {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
+ {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"},
+ {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"},
+ {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"},
+ {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"},
+ {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"},
+ {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"},
+ {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"},
+ {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"},
+ {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"},
+ {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"},
+ {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"},
+ {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"},
+ {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"},
+ {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"},
+ {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"},
+ {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"},
+ {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"},
+ {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"},
+ {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"},
+ {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"},
+ {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"},
+ {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"},
+ {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"},
+ {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"},
+ {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"},
+ {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"},
+ {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"},
+ {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"},
+ {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"},
+ {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"},
+ {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"},
+ {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"},
+ {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"},
+ {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"},
+ {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"},
+ {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"},
+ {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"},
+ {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"},
+ {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"},
+ {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"},
+ {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"},
+ {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"},
+ {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"},
+ {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"},
+ {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"},
+ {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"},
+ {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"},
+ {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"},
+ {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"},
+ {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"},
+ {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"},
+ {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"},
+ {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"},
+ {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"},
+ {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"},
+ {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"},
+ {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"},
+ {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"},
+ {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"},
+ {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"},
+ {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"},
+ {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"},
+ {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"},
+ {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"},
+ {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"},
+ {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"},
+ {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"},
+ {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"},
+ {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"},
+]
+
+[[package]]
+name = "mccabe"
+version = "0.7.0"
+description = "McCabe checker, plugin for flake8"
+optional = false
+python-versions = ">=3.6"
+groups = ["dev"]
+files = [
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+
+[[package]]
+name = "packaging"
+version = "26.0"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.8"
+groups = ["main", "dev"]
+files = [
+ {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"},
+ {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"},
+]
+
+[[package]]
+name = "pep8-naming"
+version = "0.15.1"
+description = "Check PEP-8 naming conventions, plugin for flake8"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pep8_naming-0.15.1-py3-none-any.whl", hash = "sha256:eb63925e7fd9e028c7f7ee7b1e413ec03d1ee5de0e627012102ee0222c273c86"},
+ {file = "pep8_naming-0.15.1.tar.gz", hash = "sha256:f6f4a499aba2deeda93c1f26ccc02f3da32b035c8b2db9696b730ef2c9639d29"},
+]
+
+[package.dependencies]
+flake8 = ">=5.0.0"
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
+ {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["coverage", "pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pycodestyle"
+version = "2.14.0"
+description = "Python style guide checker"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"},
+ {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"},
+]
+
+[[package]]
+name = "pyflakes"
+version = "3.4.0"
+description = "passive checker of Python programs"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"},
+ {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"},
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
+ {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pytest"
+version = "9.0.2"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.10"
+groups = ["dev"]
+files = [
+ {file = "pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b"},
+ {file = "pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
+iniconfig = ">=1.0.1"
+packaging = ">=22"
+pluggy = ">=1.5,<2"
+pygments = ">=2.7.2"
+
+[package.extras]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-cov"
+version = "7.0.0"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861"},
+ {file = "pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1"},
+]
+
+[package.dependencies]
+coverage = {version = ">=7.10.6", extras = ["toml"]}
+pluggy = ">=1.2"
+pytest = ">=7"
+
+[package.extras]
+testing = ["process-tests", "pytest-xdist", "virtualenv"]
+
+[[package]]
+name = "requests"
+version = "2.32.5"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
+ {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset_normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "urllib3"
+version = "2.6.3"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
+ {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""]
+
+[[package]]
+name = "werkzeug"
+version = "3.1.5"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc"},
+ {file = "werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67"},
+]
+
+[package.dependencies]
+markupsafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog (>=2.3)"]
+
+[metadata]
+lock-version = "2.1"
+python-versions = ">=3.13"
+content-hash = "5bcb333e951818ca4706d50bae307ab22a95462b6e393691b1a6d0992e4ffc41"
diff --git a/app_python/pyproject.toml b/app_python/pyproject.toml
new file mode 100644
index 0000000000..1e8b7fba15
--- /dev/null
+++ b/app_python/pyproject.toml
@@ -0,0 +1,25 @@
+[tool.poetry]
+name = "devops-info-service"
+version = "0.1.0"
+description = ""
+authors = ["LocalT0aster"]
+readme = "README.md"
+packages = [{ include = "src" }]
+
+[tool.poetry.dependencies]
+python = ">=3.13"
+flask = ">=3.1.2,<4.0.0"
+requests = ">=2.32.5,<3.0.0"
+gunicorn = "^25.0.3"
+
+[build-system]
+requires = ["poetry-core>=2.0.0,<3.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[dependency-groups]
+dev = [
+ "pytest (>=9.0.2,<10.0.0)",
+ "pytest-cov (>=7.0.0,<8.0.0)",
+ "flake8 (>=7.3.0,<8.0.0)",
+ "pep8-naming (>=0.15.1,<0.16.0)"
+]
diff --git a/app_python/src/flask_instance.py b/app_python/src/flask_instance.py
new file mode 100644
index 0000000000..7f3267ba03
--- /dev/null
+++ b/app_python/src/flask_instance.py
@@ -0,0 +1,16 @@
+"""
+Flask app instance and shared process-level state.
+"""
+
+from datetime import datetime, timezone
+import logging
+
+from flask import Flask
+
+logging.basicConfig(
+ level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
+logger = logging.getLogger(__name__)
+
+app = Flask("DevOps Info Service")
+START_TIME = datetime.now(timezone.utc) # Application start time (UTC).
diff --git a/app_python/src/main.py b/app_python/src/main.py
new file mode 100644
index 0000000000..d2d7356067
--- /dev/null
+++ b/app_python/src/main.py
@@ -0,0 +1,27 @@
+"""
+DevOps Info Service
+Application runtime entrypoint.
+"""
+
+import os
+
+try:
+ from .flask_instance import app, logger
+ from . import router # noqa: F401
+except ImportError: # pragma: no cover - allows `python src/main.py`
+ from flask_instance import app, logger
+ import router # noqa: F401
+
+HOST = os.getenv("HOST", "0.0.0.0")
+PORT = int(os.getenv("PORT", 5000))
+DEBUG = os.getenv("DEBUG", "False").lower() == "true"
+
+
+def run() -> None:
+ """Run development server."""
+ logger.info("Application starting...")
+ app.run(host=HOST, port=PORT, debug=DEBUG)
+
+
+if __name__ == "__main__": # pragma: no cover
+ run()
diff --git a/app_python/src/router.py b/app_python/src/router.py
new file mode 100644
index 0000000000..994b1340e4
--- /dev/null
+++ b/app_python/src/router.py
@@ -0,0 +1,159 @@
+"""
+Route handlers and response helpers.
+"""
+
+from datetime import datetime, timezone
+import inspect
+from multiprocessing import cpu_count
+import platform
+import socket
+
+from flask import jsonify, request
+
+try:
+ from .flask_instance import START_TIME, app, logger
+except ImportError: # pragma: no cover - allows `python src/main.py`
+ from flask_instance import START_TIME, app, logger
+
+__version__ = "1.0.0"
+
+
+def get_service_info() -> dict[str, str]:
+ """Collect info about service."""
+ return {
+ "name": "devops-info-service",
+ "version": __version__,
+ "description": "DevOps course info service",
+ "framework": "Flask",
+ }
+
+
+def get_platform_info() -> dict[str, str | int]:
+ """Collect system information."""
+
+ def _platform_version() -> str:
+ """Return a human-friendly OS version string."""
+ match platform.system().lower():
+ case "linux":
+ return platform.freedesktop_os_release()["PRETTY_NAME"]
+ case "windows":
+ return f"{platform.system()} {platform.win32_ver()[1]}"
+ case _:
+ return platform.version()
+
+ return {
+ "hostname": socket.gethostname(),
+ "platform": platform.system(),
+ "platform_version": _platform_version(),
+ "architecture": platform.machine(),
+ "cpu_count": cpu_count(),
+ "python_version": platform.python_version(),
+ }
+
+
+def get_uptime() -> dict[str, str | int]:
+ """Return uptime in seconds and a simple human string."""
+ delta = datetime.now(tz=timezone.utc) - START_TIME
+ up_seconds = int(delta.total_seconds())
+ up_hours = up_seconds // 3600
+ up_minutes = (up_seconds % 3600) // 60
+ return {
+ "seconds": up_seconds,
+ "human": f"{up_hours} hours, {up_minutes} minutes",
+ }
+
+
+def get_runtime() -> dict[str, str | int]:
+ """Return current runtime metadata (uptime + UTC timestamp)."""
+ up = get_uptime()
+ return {
+ "uptime_seconds": up["seconds"],
+ "uptime_human": up["human"],
+ "current_time": datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
+ "timezone": "UTC",
+ }
+
+
+def get_request_info(req) -> dict[str, str | None]:
+ """Return basic request metadata for debugging/telemetry."""
+ return {
+ "client_ip": req.remote_addr,
+ "user_agent": req.headers.get("User-Agent"),
+ "method": req.method,
+ "path": req.path,
+ }
+
+
+def list_routes() -> list[dict[str, str]]:
+ """Return a flat list of route + method + description."""
+ out: list[dict[str, str]] = []
+
+ for rule in sorted(app.url_map.iter_rules(), key=lambda r: (r.rule, r.endpoint)):
+ if rule.endpoint == "static":
+ continue
+
+ view = app.view_functions.get(rule.endpoint)
+
+ desc = ""
+ if view is not None:
+ desc = inspect.getdoc(view) or ""
+ desc = desc.splitlines()[0].strip() or ""
+
+ for method in sorted(rule.methods - {"HEAD", "OPTIONS"}):
+ out.append(
+ {
+ "path": rule.rule,
+ "method": method,
+ "description": desc,
+ }
+ )
+ return out
+
+
+@app.route("/")
+def index():
+ """Service information."""
+ logger.debug("Request: %s %s", request.method, request.path)
+ return jsonify(
+ {
+ "service": get_service_info(),
+ "system": get_platform_info(),
+ "runtime": get_uptime(),
+ "request": get_request_info(request),
+ "endpoints": list_routes(),
+ }
+ )
+
+
+@app.route("/health")
+def health():
+ """Health check."""
+ logger.debug("Request: %s %s", request.method, request.path)
+ return jsonify(
+ {
+ "status": "healthy",
+ "timestamp": datetime.now(timezone.utc).isoformat(),
+ "uptime_seconds": get_uptime()["seconds"],
+ }
+ )
+
+
+@app.errorhandler(404)
+def not_found(error): # noqa: ARG001
+ """Return a JSON 404 payload."""
+ logger.debug("Request: %s %s", request.method, request.path)
+ return jsonify({"error": "Not Found", "message": "Endpoint does not exist"}), 404
+
+
+@app.errorhandler(500)
+def internal_error(error): # noqa: ARG001
+ """Return a JSON 500 payload."""
+ return (
+ jsonify(
+ {
+ "error": "Internal Server Error",
+ "message": "An unexpected error occurred",
+ }
+ ),
+ 500,
+ )
diff --git a/app_python/tests/__init__.py b/app_python/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/app_python/tests/conftest.py b/app_python/tests/conftest.py
new file mode 100644
index 0000000000..eef52e6a63
--- /dev/null
+++ b/app_python/tests/conftest.py
@@ -0,0 +1,14 @@
+"""Shared pytest fixtures for app endpoint tests."""
+
+import pytest
+
+from src.flask_instance import app
+import src.router # noqa: F401 # Ensure route decorators are loaded.
+
+
+@pytest.fixture()
+def client():
+ """Return a Flask test client without starting a real HTTP server."""
+ app.config.update(TESTING=True, PROPAGATE_EXCEPTIONS=False)
+ with app.test_client() as test_client:
+ yield test_client
diff --git a/app_python/tests/test_endpoints.py b/app_python/tests/test_endpoints.py
new file mode 100644
index 0000000000..d0aca40338
--- /dev/null
+++ b/app_python/tests/test_endpoints.py
@@ -0,0 +1,118 @@
+"""Unit tests for HTTP endpoints and error handling."""
+
+from datetime import datetime
+
+import src.router as router
+
+
+def _raise_runtime_error() -> None:
+ raise RuntimeError("simulated failure")
+
+
+def test_index_returns_expected_json_structure_and_types(client):
+ """GET / should return the expected nested schema with stable field types."""
+ response = client.get(
+ "/",
+ headers={"User-Agent": "pytest-suite/1.0"},
+ environ_overrides={"REMOTE_ADDR": "203.0.113.7"},
+ )
+
+ assert response.status_code == 200
+ payload = response.get_json()
+ assert payload is not None
+
+ assert {"service", "system", "runtime", "request", "endpoints"} <= payload.keys()
+
+ service = payload["service"]
+ assert service["name"] == "devops-info-service"
+ assert service["framework"] == "Flask"
+ assert isinstance(service["version"], str)
+ assert isinstance(service["description"], str)
+
+ system = payload["system"]
+ assert isinstance(system["hostname"], str)
+ assert system["hostname"]
+ assert isinstance(system["platform"], str)
+ assert isinstance(system["platform_version"], str)
+ assert isinstance(system["architecture"], str)
+ assert isinstance(system["cpu_count"], int)
+ assert system["cpu_count"] >= 1
+ assert isinstance(system["python_version"], str)
+
+ runtime = payload["runtime"]
+ assert isinstance(runtime["seconds"], int)
+ assert runtime["seconds"] >= 0
+ assert isinstance(runtime["human"], str)
+
+ request = payload["request"]
+ assert request["client_ip"] == "203.0.113.7"
+ assert request["user_agent"] == "pytest-suite/1.0"
+ assert request["method"] == "GET"
+ assert request["path"] == "/"
+
+ endpoints = payload["endpoints"]
+ assert isinstance(endpoints, list)
+ assert endpoints
+ for endpoint in endpoints:
+ assert {"path", "method", "description"} <= endpoint.keys()
+ assert isinstance(endpoint["path"], str)
+ assert isinstance(endpoint["method"], str)
+ assert isinstance(endpoint["description"], str)
+
+ route_index = {(endpoint["method"], endpoint["path"]) for endpoint in endpoints}
+ assert ("GET", "/") in route_index
+ assert ("GET", "/health") in route_index
+
+
+def test_health_returns_expected_json_structure_and_types(client):
+ """GET /health should report healthy status and typed runtime metadata."""
+ response = client.get("/health")
+
+ assert response.status_code == 200
+ payload = response.get_json()
+ assert payload is not None
+
+ assert {"status", "timestamp", "uptime_seconds"} <= payload.keys()
+ assert payload["status"] == "healthy"
+ assert isinstance(payload["uptime_seconds"], int)
+ assert payload["uptime_seconds"] >= 0
+
+ parsed_timestamp = datetime.fromisoformat(payload["timestamp"])
+ assert parsed_timestamp.tzinfo is not None
+
+
+def test_unknown_endpoint_returns_json_404(client):
+ """Unknown routes should be handled by JSON 404 error handler."""
+ response = client.get("/definitely-does-not-exist")
+
+ assert response.status_code == 404
+ assert response.get_json() == {
+ "error": "Not Found",
+ "message": "Endpoint does not exist",
+ }
+
+
+def test_index_returns_json_500_when_platform_probe_fails(client, monkeypatch):
+ """GET / should return JSON 500 when an internal helper crashes."""
+ monkeypatch.setattr(router, "get_platform_info", _raise_runtime_error)
+
+ response = client.get("/")
+
+ assert response.status_code == 500
+ assert response.get_json() == {
+ "error": "Internal Server Error",
+ "message": "An unexpected error occurred",
+ }
+
+
+def test_health_returns_json_500_when_uptime_probe_fails(client, monkeypatch):
+ """GET /health should return JSON 500 when uptime collection crashes."""
+ monkeypatch.setattr(router, "get_uptime", _raise_runtime_error)
+
+ response = client.get("/health")
+
+ assert response.status_code == 500
+ assert response.get_json() == {
+ "error": "Internal Server Error",
+ "message": "An unexpected error occurred",
+ }
diff --git a/app_python/tests/test_unit_helpers.py b/app_python/tests/test_unit_helpers.py
new file mode 100644
index 0000000000..a17f071a61
--- /dev/null
+++ b/app_python/tests/test_unit_helpers.py
@@ -0,0 +1,93 @@
+"""Unit tests for helper functions and app entrypoint behavior."""
+
+from datetime import datetime
+from unittest.mock import Mock
+
+from flask import request
+
+from src.flask_instance import app
+import src.main as main
+import src.router as router
+
+
+def test_run_calls_flask_app_with_configured_host_port_debug(monkeypatch):
+ """main.run should log startup and pass module config into app.run."""
+ run_mock = Mock()
+ info_mock = Mock()
+
+ monkeypatch.setattr(main, "HOST", "127.0.0.1")
+ monkeypatch.setattr(main, "PORT", 5050)
+ monkeypatch.setattr(main, "DEBUG", True)
+ monkeypatch.setattr(main.app, "run", run_mock)
+ monkeypatch.setattr(main.logger, "info", info_mock)
+
+ main.run()
+
+ info_mock.assert_called_once_with("Application starting...")
+ run_mock.assert_called_once_with(host="127.0.0.1", port=5050, debug=True)
+
+
+def test_get_runtime_maps_uptime_payload(monkeypatch):
+ """get_runtime should map uptime fields and produce UTC timestamp text."""
+ monkeypatch.setattr(
+ router,
+ "get_uptime",
+ lambda: {"seconds": 42, "human": "0 hours, 0 minutes"},
+ )
+
+ runtime = router.get_runtime()
+
+ assert runtime["uptime_seconds"] == 42
+ assert runtime["uptime_human"] == "0 hours, 0 minutes"
+ assert runtime["timezone"] == "UTC"
+ assert runtime["current_time"].endswith("Z")
+ datetime.strptime(runtime["current_time"], "%Y-%m-%dT%H:%M:%SZ")
+
+
+def test_get_platform_info_windows_platform_version_branch(monkeypatch):
+ """Windows branch should format platform_version from win32 metadata."""
+ monkeypatch.setattr(router.platform, "system", lambda: "Windows")
+ monkeypatch.setattr(router.platform, "win32_ver", lambda: ("", "11", "", ""))
+ monkeypatch.setattr(router.platform, "machine", lambda: "AMD64")
+ monkeypatch.setattr(router.platform, "python_version", lambda: "3.14.2")
+ monkeypatch.setattr(router.socket, "gethostname", lambda: "test-host")
+ monkeypatch.setattr(router, "cpu_count", lambda: 8)
+
+ payload = router.get_platform_info()
+
+ assert payload["platform"] == "Windows"
+ assert payload["platform_version"] == "Windows 11"
+ assert payload["hostname"] == "test-host"
+ assert payload["cpu_count"] == 8
+
+
+def test_get_platform_info_default_platform_version_branch(monkeypatch):
+ """Non-Linux and non-Windows branch should use platform.version()."""
+ monkeypatch.setattr(router.platform, "system", lambda: "Darwin")
+ monkeypatch.setattr(router.platform, "version", lambda: "Darwin Kernel 25.0")
+ monkeypatch.setattr(router.platform, "machine", lambda: "arm64")
+ monkeypatch.setattr(router.platform, "python_version", lambda: "3.14.2")
+ monkeypatch.setattr(router.socket, "gethostname", lambda: "mac-host")
+ monkeypatch.setattr(router, "cpu_count", lambda: 10)
+
+ payload = router.get_platform_info()
+
+ assert payload["platform"] == "Darwin"
+ assert payload["platform_version"] == "Darwin Kernel 25.0"
+
+
+def test_get_request_info_returns_none_when_user_agent_missing():
+ """Missing User-Agent header should map to None without crashing."""
+ with app.test_request_context(
+ "/diagnostic",
+ method="POST",
+ environ_base={"REMOTE_ADDR": "198.51.100.9"},
+ ):
+ info = router.get_request_info(request)
+
+ assert info == {
+ "client_ip": "198.51.100.9",
+ "user_agent": None,
+ "method": "POST",
+ "path": "/diagnostic",
+ }
diff --git a/docker/provision_vm.sh b/docker/provision_vm.sh
new file mode 100755
index 0000000000..b79bfd3c4d
--- /dev/null
+++ b/docker/provision_vm.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+: "${VM_USER:?VM_USER must be set}"
+: "${SSH_PUBLIC_KEY:?SSH_PUBLIC_KEY must be set}"
+
+export DEBIAN_FRONTEND=noninteractive
+
+if ! command -v sshd >/dev/null 2>&1; then
+ apt-get update
+ apt-get install -y --no-install-recommends openssh-server ca-certificates
+fi
+
+id -u "${VM_USER}" >/dev/null 2>&1 || useradd -m -s /bin/bash "${VM_USER}"
+install -d -m 700 -o "${VM_USER}" -g "${VM_USER}" "/home/${VM_USER}/.ssh"
+printf '%s\n' "${SSH_PUBLIC_KEY}" >"/home/${VM_USER}/.ssh/authorized_keys"
+chown "${VM_USER}:${VM_USER}" "/home/${VM_USER}/.ssh/authorized_keys"
+chmod 600 "/home/${VM_USER}/.ssh/authorized_keys"
+
+mkdir -p /run/sshd
+cat >/etc/ssh/sshd_config.d/lab04.conf <<'EOF'
[... remainder of docker/provision_vm.sh (sshd config heredoc body, sshd startup) and the diff header of the Lab 4 report markdown lost in extraction ...]
+  - SSH: host `127.0.0.1:2222` -> container `22`
+  - HTTP: host `0.0.0.0:80` -> container `80`
+  - App: host `0.0.0.0:5000` -> container `5000`
+- Public IP equivalent: `127.0.0.1`.
+- Cost: `$0`.
+
+## 2. Terraform (OpenTofu) Implementation
+
+- CLI used: OpenTofu `v1.10.9` (Terraform-compatible HCL).
+- Project path: `terraform/`.
+- Main files:
+ - `versions.tf`: provider + required version.
+ - `main.tf`: network + Ubuntu VM container + startup bootstrap for SSH service + published ports.
+ - `variables.tf`: bind IPs, host ports, labels.
+ - `outputs.tf`: endpoints and connection commands.
+- Project structure: split into `versions.tf` (providers), `variables.tf` (inputs), `main.tf` (resources), and `outputs.tf` (connection/output values) for readability and predictable diffs.
+
+### Key Decisions
+
+- Used Ubuntu image directly to keep `apply` simple and avoid local custom image build failures.
+- Used startup bootstrap in a separate shell script (`docker/provision_vm.sh`) to avoid duplicated provisioning logic across Terraform and Pulumi.
+- Kept `80` and `5000` port mappings defined in IaC, but did not run mock HTTP services in the container.
+- Bound SSH to `127.0.0.1` by default to reduce exposure.
+
+### Challenges
+
+- Provider download from registry/GitHub release assets may timeout on slow links.
+- Workaround: local plugin mirror (`~/.terraform.d/plugins`) if direct provider install fails.
+
+### Command Output
+
+
+`tofu plan`
+
+```
+$ tofu plan
+
+OpenTofu used the selected providers to generate the following execution plan. Resource
+actions are indicated with the following symbols:
+ + create
+
+OpenTofu will perform the following actions:
+
+ # docker_container.vm will be created
+ + resource "docker_container" "vm" {
+ + attach = false
+ + bridge = (known after apply)
+ + command = [
+ + "/bin/bash",
+ + "-lc",
+ + <<-EOT
+ #!/usr/bin/env bash
+
+ set -euo pipefail
+
+ : "${VM_USER:?VM_USER must be set}"
+ : "${SSH_PUBLIC_KEY:?SSH_PUBLIC_KEY must be set}"
+
+ export DEBIAN_FRONTEND=noninteractive
+
+ if ! command -v sshd >/dev/null 2>&1; then
+ apt-get update
+ apt-get install -y --no-install-recommends openssh-server ca-certificates
+ fi
+
+ id -u "${VM_USER}" >/dev/null 2>&1 || useradd -m -s /bin/bash "${VM_USER}"
+ install -d -m 700 -o "${VM_USER}" -g "${VM_USER}" "/home/${VM_USER}/.ssh"
+ printf '%s\n' "${SSH_PUBLIC_KEY}" >"/home/${VM_USER}/.ssh/authorized_keys"
+ chown "${VM_USER}:${VM_USER}" "/home/${VM_USER}/.ssh/authorized_keys"
+ chmod 600 "/home/${VM_USER}/.ssh/authorized_keys"
+
+ mkdir -p /run/sshd
+          cat >/etc/ssh/sshd_config.d/lab04.conf <<'EOF'
+          [... remainder of `tofu plan` output lost in extraction ...]
+```
+
+`tofu apply`
+
+```
+$ tofu apply
+
+OpenTofu used the selected providers to generate the following execution plan. Resource
+actions are indicated with the following symbols:
+ + create
+
+OpenTofu will perform the following actions:
+
+ # docker_container.vm will be created
+ + resource "docker_container" "vm" {
+ + attach = false
+ + bridge = (known after apply)
+ + command = [
+ + "/bin/bash",
+ + "-lc",
+ + <<-EOT
+ #!/usr/bin/env bash
+
+ set -euo pipefail
+
+ : "${VM_USER:?VM_USER must be set}"
+ : "${SSH_PUBLIC_KEY:?SSH_PUBLIC_KEY must be set}"
+
+ export DEBIAN_FRONTEND=noninteractive
+
+ if ! command -v sshd >/dev/null 2>&1; then
+ apt-get update
+ apt-get install -y --no-install-recommends openssh-server ca-certificates
+ fi
+
+ id -u "${VM_USER}" >/dev/null 2>&1 || useradd -m -s /bin/bash "${VM_USER}"
+ install -d -m 700 -o "${VM_USER}" -g "${VM_USER}" "/home/${VM_USER}/.ssh"
+ printf '%s\n' "${SSH_PUBLIC_KEY}" >"/home/${VM_USER}/.ssh/authorized_keys"
+ chown "${VM_USER}:${VM_USER}" "/home/${VM_USER}/.ssh/authorized_keys"
+ chmod 600 "/home/${VM_USER}/.ssh/authorized_keys"
+
+ mkdir -p /run/sshd
+          cat >/etc/ssh/sshd_config.d/lab04.conf <<'EOF'
+          [... remainder of `tofu apply` output lost in extraction ...]
+```
+
+SSH test
+
+```
+$ ssh -i ~/.ssh/id_ed25519 -p 2222 devops@127.0.0.1 echo "SSH available"
+The authenticity of host '[127.0.0.1]:2222 ([127.0.0.1]:2222)' can't be established.
+ED25519 key fingerprint is: SHA256:shGIrzMssSaR8sB9yuUyId7BYrKHyfi/OQSvGJq5gkk
+This key is not known by any other names.
+Are you sure you want to continue connecting (yes/no/[fingerprint])? yes
+Warning: Permanently added '[127.0.0.1]:2222' (ED25519) to the list of known hosts.
+SSH available
+```
+
+
+
+Teardown command (Terraform resources):
+
+```bash
+cd terraform
+tofu destroy -auto-approve
+```
+
+## 3. Pulumi Implementation
+
+- Pulumi CLI: `v3.192.0`
+- Language: Python
+- Project path: `pulumi/`
+- Resources:
+ - Docker network
+ - Docker `RemoteImage` (`ubuntu:24.04`)
+ - Docker container with same ports as Terraform setup
+
+### Code Differences vs Terraform
+
+- Terraform uses declarative HCL resources and variable blocks.
+- Pulumi uses Python (`__main__.py`) and typed constructor args (`docker.ContainerPortArgs`, `docker.ContainerLabelArgs`).
+- The shared provisioning logic is loaded from `docker/provision_vm.sh` in both implementations, but Pulumi reads it via `Path(...).read_text()`, while Terraform uses `file(...)`.
+
+### Advantages Discovered
+
+- Strong typing and native language constructs in Python made refactoring (for example, shared provisioning script usage) easier.
+- Pulumi outputs and resource objects map naturally to normal programming workflows.
+- For this lab size, Pulumi and Terraform were both fast enough; Pulumi felt better when logic started to grow.
+
+### Challenges
+
+- Pulumi passphrase prompts can interrupt command flow if `PULUMI_CONFIG_PASSPHRASE` is not set.
+- On Nix/Home-Manager-based setups, `pulumi-language-python` may be missing from `PATH`, which blocks `preview/up` until fixed.
+- Docker provider behavior is similar across tools, but plugin/setup issues differ and require separate troubleshooting steps.
+
+### Command Output
+
+`tofu destroy` before Pulumi migration:
+
+```bash
+$ tofu destroy -auto-approve
+```
+
+
+`pulumi preview`
+
+```
+$ pulumi preview
+Enter your passphrase to unlock config/secrets
+ (set PULUMI_CONFIG_PASSPHRASE or PULUMI_CONFIG_PASSPHRASE_FILE to remember):
+Enter your passphrase to unlock config/secrets
+Previewing update (dev):
+ Type Name Plan Info
+ + pulumi:pulumi:Stack lab04-local-docker-dev create 1 warning
+ + ├─ docker:index:Network lab04-net create
+ + ├─ docker:index:RemoteImage lab04-vm-image create
+ + └─ docker:index:Container lab04-vm create
+
+Diagnostics:
+ pulumi:pulumi:Stack (lab04-local-docker-dev):
+ warning: using pulumi-language-python from $PATH at /etc/profiles/per-user/t0ast/bin/pulumi-language-python
+
+Outputs:
+ appUrl : "http://127.0.0.1:5000"
+ containerShellCommand: "docker exec -it lab04-local-vm /bin/bash"
+ httpUrl : "http://127.0.0.1:80"
+ networkName : "lab04-local-net"
+ publicIpEquivalent : "127.0.0.1"
+ sshCommand : "ssh -i ~/.ssh/id_ed25519 -p 2222 devops@127.0.0.1"
+ vmName : "lab04-local-vm"
+
+Resources:
+ + 4 to create
+```
+
+
+
+
+`pulumi up`
+
+```
+$ pulumi up
+Enter your passphrase to unlock config/secrets
+ (set PULUMI_CONFIG_PASSPHRASE or PULUMI_CONFIG_PASSPHRASE_FILE to remember):
+Enter your passphrase to unlock config/secrets
+Previewing update (dev):
+ Type Name Plan Info
+ + pulumi:pulumi:Stack lab04-local-docker-dev create 1 warning
+ + ├─ docker:index:RemoteImage lab04-vm-image create
+ + ├─ docker:index:Network lab04-net create
+ + └─ docker:index:Container lab04-vm create
+
+Diagnostics:
+ pulumi:pulumi:Stack (lab04-local-docker-dev):
+ warning: using pulumi-language-python from $PATH at /etc/profiles/per-user/t0ast/bin/pulumi-language-python
+
+Outputs:
+ appUrl : "http://127.0.0.1:5000"
+ containerShellCommand: "docker exec -it lab04-local-vm /bin/bash"
+ httpUrl : "http://127.0.0.1:80"
+ networkName : "lab04-local-net"
+ publicIpEquivalent : "127.0.0.1"
+ sshCommand : "ssh -i ~/.ssh/id_ed25519 -p 2222 devops@127.0.0.1"
+ vmName : "lab04-local-vm"
+
+Resources:
+ + 4 to create
+
+Do you want to perform this update? yes
+Updating (dev):
+ Type Name Status Info
+ + pulumi:pulumi:Stack lab04-local-docker-dev created (2s) 1 warning
+ + ├─ docker:index:RemoteImage lab04-vm-image created (0.01s)
+ + ├─ docker:index:Network lab04-net created (2s)
+ + └─ docker:index:Container lab04-vm created (0.38s)
+
+Diagnostics:
+ pulumi:pulumi:Stack (lab04-local-docker-dev):
+ warning: using pulumi-language-python from $PATH at /etc/profiles/per-user/t0ast/bin/pulumi-language-python
+
+Outputs:
+ appUrl : "http://127.0.0.1:5000"
+ containerShellCommand: "docker exec -it lab04-local-vm /bin/bash"
+ httpUrl : "http://127.0.0.1:80"
+ networkName : "lab04-local-net"
+ publicIpEquivalent : "127.0.0.1"
+ sshCommand : "ssh -i ~/.ssh/id_ed25519 -p 2222 devops@127.0.0.1"
+ vmName : "lab04-local-vm"
+
+Resources:
+ + 4 created
+
+Duration: 3s
+```
+
+
+
+
+SSH test
+
+```
+$ ssh -i ~/.ssh/id_ed25519 -p 2222 devops@127.0.0.1 echo "SSH works"
+The authenticity of host '[127.0.0.1]:2222 ([127.0.0.1]:2222)' can't be established.
+ED25519 key fingerprint is: SHA256:spW/AgFoqrVqpf1i7ZWEUqYGXJ8rZM6wGU5+S4WheVI
+This key is not known by any other names.
+Are you sure you want to continue connecting (yes/no/[fingerprint])? yes
+Warning: Permanently added '[127.0.0.1]:2222' (ED25519) to the list of known hosts.
+SSH works
+```
+
+
+
+Teardown command (Pulumi resources):
+
+```bash
+pulumi destroy --yes
+```
+
+## 4. Terraform vs Pulumi (Local Docker Case)
+
+### Ease of Learning
+
+Terraform/OpenTofu was faster to start because the resource graph is explicit in HCL and examples are abundant. I needed less scaffolding to get a first working run with `tofu init/plan/apply`. Pulumi required understanding stack config and language-plugin behavior in addition to infrastructure code.
+
+### Code Readability
+
+For small infrastructure, Terraform is shorter and easier to scan in one file. Pulumi is more verbose, but the Python structure becomes clearer when the project grows and you need reusable helpers. In this lab, Terraform is more concise, while Pulumi is more flexible.
+
+### Debugging
+
+Terraform plan/apply diffs are straightforward and helped quickly validate expected port mappings and resource creation. Pulumi diagnostics were helpful when runtime issues occurred, but setup-level failures (passphrase/plugin) were less obvious initially. Once setup was correct, both were manageable to debug.
+
+### Documentation
+
+Terraform has broader community examples and more copy-paste-ready snippets for common patterns. Pulumi official docs are good and practical, but there are fewer examples for some edge workflows. For this lab, Terraform documentation felt easier to navigate quickly.
+
+### Use Case
+
+I would choose Terraform/OpenTofu for straightforward declarative infrastructure with predictable patterns. I would choose Pulumi when infrastructure logic needs stronger abstraction, conditional behavior, or shared code with application teams. For this local Docker lab, either works, but Terraform was simpler and Pulumi was more programmable.
+
+## 5. Lab 5 Preparation & Cleanup
+
+### VM for Lab 5
+
+- Are you keeping your VM for Lab 5? **No**.
+- What will you use for Lab 5? A local VM via libvirt, or a fresh Linux VPS.
+
+### Cleanup Status
+
+- Decision: destroy both Terraform and Pulumi-managed resources after verification.
+- Teardown commands used:
+
+```bash
+cd terraform
+tofu destroy -auto-approve
+
+cd ../pulumi
+pulumi destroy --yes
+```
+
+- Verification commands:
+
+```bash
+docker ps --format '{{.Names}}' | rg 'lab04-local' || echo "No lab04 containers"
+docker network ls --format '{{.Name}}' | rg 'lab04-local' || echo "No lab04 networks"
+```
+
+## Notes
+
+- This is a local Docker-provider adaptation of a cloud-VM lab.
+- Suggestion: this lab could also use `localstack/localstack` (or forks) to emulate parts of AWS locally for free.
+ -
+ -
+ - The developer recently stated the end of support for this community image, but there will most probably be forks.
diff --git a/pulumi/.gitignore b/pulumi/.gitignore
new file mode 100644
index 0000000000..a3807e5bdb
--- /dev/null
+++ b/pulumi/.gitignore
@@ -0,0 +1,2 @@
+*.pyc
+venv/
diff --git a/pulumi/Pulumi.dev.yaml.example b/pulumi/Pulumi.dev.yaml.example
new file mode 100644
index 0000000000..aaf637b3de
--- /dev/null
+++ b/pulumi/Pulumi.dev.yaml.example
@@ -0,0 +1,10 @@
+config:
+ lab04-local-docker:projectName: lab04-local
+ lab04-local-docker:vmUser: devops
+ lab04-local-docker:sshPublicKey: "ssh-ed25519 AAAA... your-user@host"
+ lab04-local-docker:sshPrivateKeyPath: "~/.ssh/id_ed25519"
+ lab04-local-docker:sshBindIp: 127.0.0.1
+ lab04-local-docker:publicBindIp: 0.0.0.0
+ lab04-local-docker:sshHostPort: "2222"
+ lab04-local-docker:httpHostPort: "80"
+ lab04-local-docker:appHostPort: "5000"
diff --git a/pulumi/Pulumi.yaml b/pulumi/Pulumi.yaml
new file mode 100644
index 0000000000..a4a92ce5d1
--- /dev/null
+++ b/pulumi/Pulumi.yaml
@@ -0,0 +1,11 @@
+name: lab04-local-docker
+description: IaC using Pulumi with local Docker provider.
+runtime:
+ name: python
+ options:
+ toolchain: pip
+ virtualenv: venv
+config:
+ pulumi:tags:
+ value:
+ pulumi:template: python
diff --git a/pulumi/__main__.py b/pulumi/__main__.py
new file mode 100644
index 0000000000..a26adf200d
--- /dev/null
+++ b/pulumi/__main__.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from pathlib import Path
+
+import pulumi
+import pulumi_docker as docker
+
+
+@dataclass(frozen=True)
+class HostPorts:
+ ssh: int
+ http: int
+ app: int
+
+
+config = pulumi.Config()
+
+project_name: str = config.get("projectName") or "lab04-local"
+vm_user: str = config.get("vmUser") or "devops"
+ssh_public_key: str = config.require("sshPublicKey")
+ssh_private_key_path: str = config.get("sshPrivateKeyPath") or "~/.ssh/id_ed25519"
+ssh_bind_ip: str = config.get("sshBindIp") or "127.0.0.1"
+public_bind_ip: str = config.get("publicBindIp") or "0.0.0.0"
+
+ports = HostPorts(
+ ssh=config.get_int("sshHostPort") or 2222,
+ http=config.get_int("httpHostPort") or 80,
+ app=config.get_int("appHostPort") or 5000,
+)
+
+labels: dict[str, str] = {
+ "lab": "04",
+ "managed-by": "pulumi",
+ "project": project_name,
+}
+
+bootstrap_script = (Path(__file__).resolve().parent.parent / "docker" / "provision_vm.sh").read_text(
+ encoding="utf-8"
+)
+
+network = docker.Network(
+ "lab04-net",
+ name=f"{project_name}-net",
+ labels=[docker.NetworkLabelArgs(label=k, value=v) for k, v in labels.items()],
+)
+
+image = docker.RemoteImage(
+ "lab04-vm-image",
+ name="ubuntu:24.04",
+ keep_locally=True,
+)
+
+container = docker.Container(
+ "lab04-vm",
+ name=f"{project_name}-vm",
+ image=image.repo_digest,
+ hostname=f"{project_name}-vm",
+ restart="unless-stopped",
+ command=["/bin/bash", "-lc", bootstrap_script],
+ envs=[f"VM_USER={vm_user}", f"SSH_PUBLIC_KEY={ssh_public_key}"],
+ ports=[
+ docker.ContainerPortArgs(internal=22, external=ports.ssh, ip=ssh_bind_ip, protocol="tcp"),
+ docker.ContainerPortArgs(internal=80, external=ports.http, ip=public_bind_ip, protocol="tcp"),
+ docker.ContainerPortArgs(internal=5000, external=ports.app, ip=public_bind_ip, protocol="tcp"),
+ ],
+ labels=[docker.ContainerLabelArgs(label=k, value=v) for k, v in labels.items()],
+ networks_advanced=[
+ docker.ContainerNetworksAdvancedArgs(
+ name=network.name,
+ aliases=[f"{project_name}-vm"],
+ )
+ ],
+)
+
+pulumi.export("vmName", container.name)
+pulumi.export("networkName", network.name)
+pulumi.export("publicIpEquivalent", "127.0.0.1")
+pulumi.export("sshCommand", f"ssh -i {ssh_private_key_path} -p {ports.ssh} {vm_user}@127.0.0.1")
+pulumi.export("containerShellCommand", f"docker exec -it {project_name}-vm /bin/bash")
+pulumi.export("httpUrl", f"http://127.0.0.1:{ports.http}")
+pulumi.export("appUrl", f"http://127.0.0.1:{ports.app}")
diff --git a/pulumi/requirements.txt b/pulumi/requirements.txt
new file mode 100644
index 0000000000..f5a5f5ebd4
--- /dev/null
+++ b/pulumi/requirements.txt
@@ -0,0 +1,2 @@
+pulumi>=3.0.0,<4.0.0
+pulumi_docker>=4.0.0,<5.0.0
diff --git a/terraform/README.md b/terraform/README.md
new file mode 100644
index 0000000000..7097fc4b2c
--- /dev/null
+++ b/terraform/README.md
@@ -0,0 +1,45 @@
+# Lab04 Terraform (Local Docker Provider)
+
+This Terraform project implements Lab04 with the local Docker provider instead of a cloud VM.
+
+## What It Creates
+
+- Docker network (`network/VPC` equivalent)
+- Ubuntu 24.04 container (`VM/compute` equivalent) with startup bootstrap
+ - installs and starts `openssh-server`
+ - configures SSH authorized key
+ - starts simple HTTP endpoints on ports `80` and `5000`
+- Port mappings as firewall equivalents:
+ - SSH: container `22` -> host `2222` (bound to `127.0.0.1`)
+  - HTTP: container `80` -> host `80` (default `http_host_port`; override in `terraform.tfvars`)
+ - App: container `5000` -> host `5000`
+
+## Local Prerequisites
+
+- Docker daemon running
+- OpenTofu or Terraform CLI
+
+## Quick Start (OpenTofu)
+
+```bash
+cp terraform/terraform.tfvars.example terraform/terraform.tfvars
+# edit terraform.tfvars and set ssh_public_key
+
+cd terraform
+tofu init -plugin-dir="$HOME/.terraform.d/plugins"
+tofu plan
+tofu apply -auto-approve
+
+# verify SSH
+ssh -i ~/.ssh/id_ed25519 -p 2222 devops@127.0.0.1 'echo SSH_OK'
+```
+
+If provider download is blocked, manually place provider binaries under:
+`~/.terraform.d/plugins/registry.terraform.io/<NAMESPACE>/<NAME>/<VERSION>/linux_amd64/` (e.g. `kreuzwerker/docker/3.6.2`)
+
+## Destroy
+
+```bash
+cd terraform
+tofu destroy -auto-approve
+```
diff --git a/terraform/main.tf b/terraform/main.tf
new file mode 100644
index 0000000000..e5fc12074c
--- /dev/null
+++ b/terraform/main.tf
@@ -0,0 +1,82 @@
+provider "docker" {
+ host = var.docker_host
+}
+
+locals {
+ vm_name = "${var.project_name}-vm"
+ bootstrap_script = file("${path.module}/../docker/provision_vm.sh")
+
+ default_labels = {
+ lab = "04"
+ managed-by = "terraform"
+ project = var.project_name
+ }
+
+ resource_labels = merge(local.default_labels, var.extra_labels)
+}
+
+resource "docker_network" "lab04" {
+ name = "${var.project_name}-net"
+
+ dynamic "labels" {
+ for_each = local.resource_labels
+
+ content {
+ label = labels.key
+ value = labels.value
+ }
+ }
+}
+
+resource "docker_image" "vm_image" {
+ name = "ubuntu:24.04"
+ keep_locally = true
+}
+
+resource "docker_container" "vm" {
+ name = local.vm_name
+ image = docker_image.vm_image.image_id
+ hostname = local.vm_name
+ restart = "unless-stopped"
+ command = ["/bin/bash", "-lc", local.bootstrap_script]
+
+ env = [
+ "VM_USER=${var.vm_user}",
+ "SSH_PUBLIC_KEY=${var.ssh_public_key}",
+ ]
+
+ networks_advanced {
+ name = docker_network.lab04.name
+ aliases = [local.vm_name]
+ }
+
+ ports {
+ internal = 22
+ external = var.ssh_host_port
+ ip = var.ssh_bind_ip
+ protocol = "tcp"
+ }
+
+ ports {
+ internal = 80
+ external = var.http_host_port
+ ip = var.public_bind_ip
+ protocol = "tcp"
+ }
+
+ ports {
+ internal = 5000
+ external = var.app_host_port
+ ip = var.public_bind_ip
+ protocol = "tcp"
+ }
+
+ dynamic "labels" {
+ for_each = local.resource_labels
+
+ content {
+ label = labels.key
+ value = labels.value
+ }
+ }
+}
diff --git a/terraform/outputs.tf b/terraform/outputs.tf
new file mode 100644
index 0000000000..d8130abbce
--- /dev/null
+++ b/terraform/outputs.tf
@@ -0,0 +1,39 @@
+output "vm_name" {
+ description = "Name of the VM-like Docker container."
+ value = docker_container.vm.name
+}
+
+output "network_name" {
+ description = "Name of the Docker network (VPC equivalent)."
+ value = docker_network.lab04.name
+}
+
+output "container_ip" {
+ description = "Container IP inside the Docker network."
+ value = one(docker_container.vm.network_data).ip_address
+}
+
+output "public_ip_equivalent" {
+ description = "Host endpoint used as public access in the local provider setup."
+ value = "127.0.0.1"
+}
+
+output "ssh_command" {
+ description = "SSH command for the VM-like container."
+ value = "ssh -i ${var.ssh_private_key_path} -p ${var.ssh_host_port} ${var.vm_user}@127.0.0.1"
+}
+
+output "container_shell_command" {
+ description = "Direct shell access without SSH."
+ value = "docker exec -it ${docker_container.vm.name} /bin/bash"
+}
+
+output "http_url" {
+ description = "HTTP endpoint (port 80 equivalent)."
+ value = "http://127.0.0.1:${var.http_host_port}"
+}
+
+output "app_url" {
+ description = "Application endpoint (port 5000 equivalent)."
+ value = "http://127.0.0.1:${var.app_host_port}"
+}
diff --git a/terraform/terraform.tfvars.example b/terraform/terraform.tfvars.example
new file mode 100644
index 0000000000..2642161e51
--- /dev/null
+++ b/terraform/terraform.tfvars.example
@@ -0,0 +1,13 @@
+ssh_public_key = "ssh-ed25519 AAAA... your-user@host"
+
+# Optional overrides:
+# vm_user = "devops"
+# ssh_private_key_path = "~/.ssh/id_ed25519"
+# ssh_host_port = 2222
+# http_host_port = 80
+# app_host_port = 5000
+# ssh_bind_ip = "127.0.0.1"
+# public_bind_ip = "0.0.0.0"
+# extra_labels = {
+# owner = "your-name"
+# }
diff --git a/terraform/variables.tf b/terraform/variables.tf
new file mode 100644
index 0000000000..d6ae2bfd65
--- /dev/null
+++ b/terraform/variables.tf
@@ -0,0 +1,80 @@
+variable "docker_host" {
+ description = "Docker daemon socket."
+ type = string
+ default = "unix:///var/run/docker.sock"
+}
+
+variable "project_name" {
+ description = "Prefix used for Docker resource names."
+ type = string
+ default = "lab04-local"
+}
+
+variable "vm_user" {
+ description = "Linux username created inside the VM-like container for SSH access."
+ type = string
+ default = "devops"
+}
+
+variable "ssh_public_key" {
+ description = "SSH public key allowed to access the VM-like container."
+ type = string
+ sensitive = true
+}
+
+variable "ssh_private_key_path" {
+ description = "Private key path used in the rendered SSH command output."
+ type = string
+ default = "~/.ssh/id_ed25519"
+}
+
+variable "ssh_bind_ip" {
+ description = "Host IP used for SSH binding. Keep 127.0.0.1 to restrict access."
+ type = string
+ default = "127.0.0.1"
+}
+
+variable "public_bind_ip" {
+ description = "Host IP used for HTTP and app ports."
+ type = string
+ default = "0.0.0.0"
+}
+
+variable "ssh_host_port" {
+ description = "Host port mapped to container port 22."
+ type = number
+ default = 2222
+
+ validation {
+ condition = var.ssh_host_port >= 1 && var.ssh_host_port <= 65535
+ error_message = "ssh_host_port must be between 1 and 65535."
+ }
+}
+
+variable "http_host_port" {
+ description = "Host port mapped to container port 80."
+ type = number
+ default = 80
+
+ validation {
+ condition = var.http_host_port >= 1 && var.http_host_port <= 65535
+ error_message = "http_host_port must be between 1 and 65535."
+ }
+}
+
+variable "app_host_port" {
+ description = "Host port mapped to container port 5000."
+ type = number
+ default = 5000
+
+ validation {
+ condition = var.app_host_port >= 1 && var.app_host_port <= 65535
+ error_message = "app_host_port must be between 1 and 65535."
+ }
+}
+
+variable "extra_labels" {
+ description = "Additional Docker labels to attach to resources."
+ type = map(string)
+ default = {}
+}
diff --git a/terraform/versions.tf b/terraform/versions.tf
new file mode 100644
index 0000000000..db6d92cec4
--- /dev/null
+++ b/terraform/versions.tf
@@ -0,0 +1,10 @@
+terraform {
+ required_version = ">= 1.9.0"
+
+ required_providers {
+ docker = {
+ source = "registry.terraform.io/kreuzwerker/docker"
+ version = "~> 3.6"
+ }
+ }
+}