From 430a889b514e4511450828562ab98e951f40abf0 Mon Sep 17 00:00:00 2001 From: existentialcoder Date: Sun, 9 Nov 2025 19:09:09 +0100 Subject: [PATCH 1/2] feat: Enhance company and job APIs --- .gitignore | 2 + apps/backend/Makefile | 27 ++++ apps/backend/alembic.ini | 147 ++++++++++++++++++ apps/backend/alembic/README | 1 + apps/backend/alembic/env.py | 61 ++++++++ apps/backend/alembic/script.py.mako | 28 ++++ .../alembic/versions/502c4a9e34f2_init.py | 42 +++++ ...79d27_remove_prof_level_in_skills_table.py | 32 ++++ .../bd49d8c6e851_add_p_w_column_to_user.py | 32 ++++ .../cf762c0cb2b6_remove_founded_year.py | 44 ++++++ ...ff6138599843_remove_location_in_company.py | 34 ++++ apps/backend/run.sh | 0 apps/backend/src/api/v1/routes/companies.py | 31 ++++ apps/backend/src/api/v1/routes/jobs.py | 60 +++---- apps/backend/src/api/v1/routes/users.py | 12 ++ apps/backend/src/db/base_class.py | 13 +- apps/backend/src/main.py | 15 +- apps/backend/src/models/__init__.py | 14 ++ apps/backend/src/models/company.py | 3 - apps/backend/src/models/job.py | 53 ++++++- apps/backend/src/models/location.py | 10 ++ apps/backend/src/models/skill.py | 1 - apps/backend/src/models/user.py | 1 + apps/backend/src/schemas/base.py | 14 ++ apps/backend/src/schemas/company.py | 25 +-- apps/backend/src/schemas/job.py | 63 ++++---- apps/backend/src/schemas/skill.py | 13 +- apps/backend/src/schemas/user.py | 7 +- apps/backend/src/services/company.py | 87 +++++++++++ apps/backend/src/services/job.py | 88 +++++++++-- 30 files changed, 841 insertions(+), 119 deletions(-) create mode 100644 apps/backend/Makefile create mode 100644 apps/backend/alembic.ini create mode 100644 apps/backend/alembic/README create mode 100644 apps/backend/alembic/env.py create mode 100644 apps/backend/alembic/script.py.mako create mode 100644 apps/backend/alembic/versions/502c4a9e34f2_init.py create mode 100644 apps/backend/alembic/versions/804e94079d27_remove_prof_level_in_skills_table.py create mode 100644 
apps/backend/alembic/versions/bd49d8c6e851_add_p_w_column_to_user.py create mode 100644 apps/backend/alembic/versions/cf762c0cb2b6_remove_founded_year.py create mode 100644 apps/backend/alembic/versions/ff6138599843_remove_location_in_company.py mode change 100644 => 100755 apps/backend/run.sh create mode 100644 apps/backend/src/models/__init__.py create mode 100644 apps/backend/src/models/location.py create mode 100644 apps/backend/src/schemas/base.py create mode 100644 apps/backend/src/services/company.py diff --git a/.gitignore b/.gitignore index 9a5aced..37f68fa 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +.vscode + # Logs logs *.log diff --git a/apps/backend/Makefile b/apps/backend/Makefile new file mode 100644 index 0000000..ade4953 --- /dev/null +++ b/apps/backend/Makefile @@ -0,0 +1,27 @@ +SHELL := /bin/bash +.ONESHELL: +.PHONY: build run-local activate-env + +VENV_DIR := api-backend-env +PYTHON := python3 +PIP := $(VENV_DIR)/bin/pip + +activate-env: + @echo "šŸ”¹ Activating virtual environment..." + if [ ! -d "$(VENV_DIR)" ]; then + echo "Creating virtual environment..." + $(PYTHON) -m venv $(VENV_DIR) + fi + source $(VENV_DIR)/bin/activate + echo "āœ… Virtual environment activated." + +build: activate-env + @echo "šŸ”¹ Installing dependencies..." + $(PIP) install -r requirements.txt + @echo "āœ… Dependencies installed." + +run-local: build + @echo "šŸ”¹ Running the project locally..." + chmod +x ./run.sh + $(VENV_DIR)/bin/python -m uvicorn src.main:app --reload + diff --git a/apps/backend/alembic.ini b/apps/backend/alembic.ini new file mode 100644 index 0000000..1b03b05 --- /dev/null +++ b/apps/backend/alembic.ini @@ -0,0 +1,147 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. 
forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. 
+# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH +# hooks = ruff +# ruff.type = exec +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/apps/backend/alembic/README b/apps/backend/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/apps/backend/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/apps/backend/alembic/env.py b/apps/backend/alembic/env.py new file mode 100644 index 0000000..b68691b --- /dev/null +++ b/apps/backend/alembic/env.py @@ -0,0 +1,61 @@ +import sys +import os +from logging.config import fileConfig +from sqlalchemy import engine_from_config, pool +from alembic import context + +# --- Add project root to path --- +sys.path.append(os.path.join(os.path.dirname(__file__), '../src')) + +from src.core.config import settings +from src.db.base_class import Base + +# This will import all models so Alembic can detect them +import src.models # ensure this imports all model files + +# this is the Alembic Config object, which provides access to values within the .ini file +config = context.config + +# Interpret the config file for Python logging. +fileConfig(config.config_file_name) + +# overwrite the sqlalchemy.url dynamically from settings +config.set_main_option("sqlalchemy.url", settings.DATABASE_URL) + +# add your model's MetaData object here +target_metadata = Base.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode.""" + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode.""" + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/apps/backend/alembic/script.py.mako b/apps/backend/alembic/script.py.mako new file mode 100644 index 
0000000..1101630 --- /dev/null +++ b/apps/backend/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/apps/backend/alembic/versions/502c4a9e34f2_init.py b/apps/backend/alembic/versions/502c4a9e34f2_init.py new file mode 100644 index 0000000..1a2e0c0 --- /dev/null +++ b/apps/backend/alembic/versions/502c4a9e34f2_init.py @@ -0,0 +1,42 @@ +"""init + +Revision ID: 502c4a9e34f2 +Revises: +Create Date: 2025-11-07 11:07:54.288880 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = '502c4a9e34f2' +down_revision: Union[str, Sequence[str], None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('users') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('users', + sa.Column('first_name', sa.VARCHAR(length=100), autoincrement=False, nullable=False), + sa.Column('last_name', sa.VARCHAR(length=100), autoincrement=False, nullable=False), + sa.Column('user_name', sa.VARCHAR(length=100), autoincrement=False, nullable=True), + sa.Column('email', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('signup_key', sa.VARCHAR(length=255), autoincrement=False, nullable=False), + sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), + sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('users_pkey')) + ) + # ### end Alembic commands ### diff --git a/apps/backend/alembic/versions/804e94079d27_remove_prof_level_in_skills_table.py b/apps/backend/alembic/versions/804e94079d27_remove_prof_level_in_skills_table.py new file mode 100644 index 0000000..2bfb9df --- /dev/null +++ b/apps/backend/alembic/versions/804e94079d27_remove_prof_level_in_skills_table.py @@ -0,0 +1,32 @@ +"""remove prof level in skills table + +Revision ID: 804e94079d27 +Revises: ff6138599843 +Create Date: 2025-11-09 11:57:43.729252 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '804e94079d27' +down_revision: Union[str, Sequence[str], None] = 'ff6138599843' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('skills', 'proficiency_level') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('skills', sa.Column('proficiency_level', sa.INTEGER(), autoincrement=False, nullable=False)) + # ### end Alembic commands ### diff --git a/apps/backend/alembic/versions/bd49d8c6e851_add_p_w_column_to_user.py b/apps/backend/alembic/versions/bd49d8c6e851_add_p_w_column_to_user.py new file mode 100644 index 0000000..7dd2d2a --- /dev/null +++ b/apps/backend/alembic/versions/bd49d8c6e851_add_p_w_column_to_user.py @@ -0,0 +1,32 @@ +"""add p/w column to user + +Revision ID: bd49d8c6e851 +Revises: 804e94079d27 +Create Date: 2025-11-09 18:41:10.212733 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'bd49d8c6e851' +down_revision: Union[str, Sequence[str], None] = '804e94079d27' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('users', sa.Column('hashed_password', sa.String(length=255), nullable=False)) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('users', 'hashed_password') + # ### end Alembic commands ### diff --git a/apps/backend/alembic/versions/cf762c0cb2b6_remove_founded_year.py b/apps/backend/alembic/versions/cf762c0cb2b6_remove_founded_year.py new file mode 100644 index 0000000..d9e0df3 --- /dev/null +++ b/apps/backend/alembic/versions/cf762c0cb2b6_remove_founded_year.py @@ -0,0 +1,44 @@ +"""remove founded year + +Revision ID: cf762c0cb2b6 +Revises: 502c4a9e34f2 +Create Date: 2025-11-08 16:06:38.294108 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'cf762c0cb2b6' +down_revision: Union[str, Sequence[str], None] = '502c4a9e34f2' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('users', + sa.Column('first_name', sa.String(length=100), nullable=False), + sa.Column('last_name', sa.String(length=100), nullable=False), + sa.Column('user_name', sa.String(length=100), nullable=True), + sa.Column('email', sa.String(length=255), nullable=True), + sa.Column('signup_key', sa.String(length=255), nullable=False), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.drop_column('companies', 'founded_year') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('companies', sa.Column('founded_year', sa.INTEGER(), autoincrement=False, nullable=True)) + op.drop_table('users') + # ### end Alembic commands ### diff --git a/apps/backend/alembic/versions/ff6138599843_remove_location_in_company.py b/apps/backend/alembic/versions/ff6138599843_remove_location_in_company.py new file mode 100644 index 0000000..35f883f --- /dev/null +++ b/apps/backend/alembic/versions/ff6138599843_remove_location_in_company.py @@ -0,0 +1,34 @@ +"""remove location in company + +Revision ID: ff6138599843 +Revises: cf762c0cb2b6 +Create Date: 2025-11-09 11:54:13.597008 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'ff6138599843' +down_revision: Union[str, Sequence[str], None] = 'cf762c0cb2b6' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('companies', 'headquarters') + op.drop_column('companies', 'location') + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('companies', sa.Column('location', sa.VARCHAR(length=255), autoincrement=False, nullable=False)) + op.add_column('companies', sa.Column('headquarters', sa.VARCHAR(length=255), autoincrement=False, nullable=True)) + # ### end Alembic commands ### diff --git a/apps/backend/run.sh b/apps/backend/run.sh old mode 100644 new mode 100755 diff --git a/apps/backend/src/api/v1/routes/companies.py b/apps/backend/src/api/v1/routes/companies.py index e69de29..54f268e 100644 --- a/apps/backend/src/api/v1/routes/companies.py +++ b/apps/backend/src/api/v1/routes/companies.py @@ -0,0 +1,31 @@ +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session + +from ....schemas import company as schemas +from ...deps.db import get_db +from ...deps.pagination import pagination_params +from ....services import company as company_service + +router = APIRouter(prefix='/companies') + +@router.get('', response_model=company_service.PaginatedCompanies, description='List all companies') +def list_companies(pagination: dict = Depends(pagination_params), db: Session = Depends(get_db)): + return company_service.get_companies(db, pagination) + +# @router.get('/{company_id}', response_model=schemas.JobBase) +# def get_company(company_id: int, db: Session = Depends(get_db)): +# db_job = company_service.get_company(db, company_id) +# if not db_job: +# raise HTTPException(status_code=404, detail='Job not found') +# return db_job + +@router.post('/', response_model=schemas.Company, description='Create a new company') +def create_company(company_data: schemas.CompanyCreate, db: Session = Depends(get_db)): + return company_service.create_company(db, company_data) + +# @router.delete('/{company_id}', description='Delete existing job') +# def delete_company(company_id: int, db: Session = Depends(get_db)): +# success = company_service.delete_job(db, company_id) +# if not success: +# raise HTTPException(status_code=404, detail='Job not found') +# return 
{'detail': 'Job deleted'} diff --git a/apps/backend/src/api/v1/routes/jobs.py b/apps/backend/src/api/v1/routes/jobs.py index 4bbb4b7..276c779 100644 --- a/apps/backend/src/api/v1/routes/jobs.py +++ b/apps/backend/src/api/v1/routes/jobs.py @@ -1,6 +1,5 @@ from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session -from uuid import UUID from ....schemas import job as schemas from ...deps.db import get_db @@ -9,32 +8,33 @@ router = APIRouter(prefix='/jobs') -# @router.post('/', response_model=schemas.JobBase) -# def create_job(job_in: schemas.JobBase, db: Session = Depends(get_db)): -# return job_service.create_job(db, job_in) - -# @router.get('/{job_id}', response_model=schemas.JobBase) -# def get_job(job_id: UUID, db: Session = Depends(get_db)): -# db_job = job_service.get_job(db, job_id) -# if not db_job: -# raise HTTPException(status_code=404, detail='Job not found') -# return db_job - - -@router.get('/', response_model=job_service.PaginatedJobs) -def list_jobs(query: str | None = None, pagination: dict = Depends(pagination_params), db: Session = Depends(get_db)): - return job_service.get_jobs(db, pagination, query) - -# @router.put('/{job_id}', response_model=schemas.JobBase) -# def update_job(job_id: UUID, job_update: schemas.JobBase, db: Session = Depends(get_db)): -# db_job = job_service.update_job(db, job_id, job_update) -# if not db_job: -# raise HTTPException(status_code=404, detail='Job not found') -# return db_job - -# @router.delete('/{job_id}')bs -# def delete_job(job_id: UUID, db: Session = Depends(get_db)): -# success = job_service.delete_job(db, job_id) -# if not success: -# raise HTTPException(status_code=404, detail='Job not found') -# return {'detail': 'Job deleted'} +@router.get('', response_model=job_service.PaginatedJobs, description='List all jobs') +def list_jobs(filters: schemas.JobFilterParams = Depends(), pagination: dict = Depends(pagination_params), db: Session = Depends(get_db)): + return 
job_service.get_jobs(db, pagination, filters) + +@router.get('/{job_id}', response_model=schemas.JobBase) +def get_job(job_id: int, db: Session = Depends(get_db)): + db_job = job_service.get_job(db, job_id) + if not db_job: + raise HTTPException(status_code=404, detail='Job not found') + return db_job + +@router.post('/', response_model=schemas.JobBase, description='Create a new job') +def create_job(job_in: schemas.JobCreate, db: Session = Depends(get_db)): + return job_service.create_job(db, job_in) + + +@router.patch('/{job_id}', response_model=schemas.JobBase, description='Update existing job') +def update_job(job_id: int, job_update: schemas.JobUpdate, db: Session = Depends(get_db)): + db_job = job_service.update_job(db, job_id, job_update) + if not db_job: + raise HTTPException(status_code=404, detail='Job not found') + + return db_job + +@router.delete('/{job_id}', description='Delete existing job') +def delete_job(job_id: int, db: Session = Depends(get_db)): + success = job_service.delete_job(db, job_id) + if not success: + raise HTTPException(status_code=404, detail='Job not found') + return {'detail': 'Job deleted'} diff --git a/apps/backend/src/api/v1/routes/users.py b/apps/backend/src/api/v1/routes/users.py index e69de29..a907e04 100644 --- a/apps/backend/src/api/v1/routes/users.py +++ b/apps/backend/src/api/v1/routes/users.py @@ -0,0 +1,12 @@ +from fastapi import APIRouter, Depends +from sqlalchemy.orm import Session + +from ....schemas import user as schemas +from ...deps.db import get_db +from ....services import user_service + +router = APIRouter(prefix='/users') + +@router.post('/', description='Create new user') +def create_user(): + pass # TODO(review): wire to user_service once it exists; route is not yet registered in main.py diff --git a/apps/backend/src/db/base_class.py b/apps/backend/src/db/base_class.py index 9ff83dd..139a9bf 100644 --- a/apps/backend/src/db/base_class.py +++ b/apps/backend/src/db/base_class.py @@ -1,9 +1,14 @@ from datetime import datetime from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column -from sqlalchemy import 
DateTime +from sqlalchemy import DateTime, func, Integer class Base(DeclarativeBase): - id: Mapped[int] = mapped_column('id', primary_key=True, autoincrement=True) - created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) - updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) __abstract__ = True + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now(), nullable=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False + ) diff --git a/apps/backend/src/main.py b/apps/backend/src/main.py index 9b4739b..e158baf 100644 --- a/apps/backend/src/main.py +++ b/apps/backend/src/main.py @@ -1,5 +1,5 @@ from fastapi import FastAPI -from .api.v1.routes import jobs +from .api.v1.routes import jobs, companies from .models import company, job, skill, user from .db.base_class import Base from .db.session import engine @@ -11,8 +11,11 @@ api_v1_prefix = '/api/v1' -app.include_router(jobs.router, prefix=api_v1_prefix, tags=['Jobs']) -# app.include_router(users.router, prefix=api_v1_prefix, tags=['Users']) -# app.include_router(plugins.router, prefix=api_v1_prefix, tags=['Plugins']) -# app.include_router(auth.router, prefix=api_v1_prefix, tags=['Auth']) -# app.include_router(health.router, prefix=api_v1_prefix, tags=['Health']) +enabled_routes = [ + { 'tags': ['Jobs'], 'route': jobs }, + { 'tags': ['Companies'], 'route': companies }, + # { 'tags': ['Skills'], 'route': skills } +] + +for enabled_route in enabled_routes: + app.include_router(enabled_route['route'].router, prefix=api_v1_prefix, tags=enabled_route['tags']) diff --git a/apps/backend/src/models/__init__.py b/apps/backend/src/models/__init__.py new file mode 100644 index 0000000..788a3f6 --- /dev/null +++ 
b/apps/backend/src/models/__init__.py @@ -0,0 +1,14 @@ +import os +import sys +from pathlib import Path +from importlib import import_module + +from ..db.base_class import Base + +current_dir = Path(__file__).parent +sys.path.append(str(current_dir)) + +for file in os.listdir(current_dir): + if file.endswith('.py') and file != '__init__.py': + module_name = file[:-3] # remove .py + import_module(f".{module_name}", package="src.models") diff --git a/apps/backend/src/models/company.py b/apps/backend/src/models/company.py index 1cf657e..87b2b93 100644 --- a/apps/backend/src/models/company.py +++ b/apps/backend/src/models/company.py @@ -9,10 +9,7 @@ class Company(Base): name: Mapped[str] = mapped_column(String(100), nullable=False, unique=True) website: Mapped[str] = mapped_column(String(255), nullable=True) email: Mapped[str] = mapped_column(String(255), nullable=True) - location: Mapped[str] = mapped_column(String(255), nullable=False) size: Mapped[int] = mapped_column(Integer, nullable=True) - headquarters: Mapped[str] = mapped_column(String(255), nullable=True) - founded_year: Mapped[int] = mapped_column(Integer, nullable=True) industry: Mapped[str] = mapped_column(String(255), nullable=True) description: Mapped[str] = mapped_column(String(500), nullable=True) diff --git a/apps/backend/src/models/job.py b/apps/backend/src/models/job.py index e9ca0e2..eb82309 100644 --- a/apps/backend/src/models/job.py +++ b/apps/backend/src/models/job.py @@ -1,9 +1,10 @@ -# src/models/job.py -from sqlalchemy import String, Integer, ForeignKey, JSON +import enum +from sqlalchemy import Enum, String, Integer, ForeignKey, JSON from sqlalchemy.orm import Mapped, mapped_column, relationship from ..db.base_class import Base from .company import Company from .skill import Skill +from .location import Location from sqlalchemy import Table, Column @@ -14,15 +15,44 @@ Column('skill_id', Integer, ForeignKey('skills.id', ondelete='CASCADE'), primary_key=True), ) +class JobStatus(str, 
enum.Enum): + OPEN = 'Open' + CLOSED = 'Closed' + PENDING = 'Pending' + + +class JobPosition(str, enum.Enum): + INTERN = 'Intern' + JUNIOR = 'Junior' + MID = 'Mid' + SENIOR = 'Senior' + LEAD = 'Lead' + MANAGER = 'Manager' + +class JobWorkModel(str, enum.Enum): + ON_SITE = 'On-site' + REMOTE = 'Remote' + HYBRID = 'Hybrid' + class Job(Base): __tablename__ = 'jobs' - job_title: Mapped[str] = mapped_column(String(255), nullable=False) + title: Mapped[str] = mapped_column(String(255), nullable=False) - status: Mapped[str] = mapped_column(String(50), nullable=False) - position: Mapped[str] = mapped_column(String(50), nullable=False) + status: Mapped[JobStatus] = mapped_column( + Enum(JobStatus, + create_constraint=True), + default=JobStatus.OPEN, + nullable=False + ) + position: Mapped[JobPosition] = mapped_column( + Enum(JobPosition, + create_constraint=True), + default=JobPosition.INTERN, + nullable=True + ) category: Mapped[str] = mapped_column(String(100), nullable=True) salary_range: Mapped[str] = mapped_column(String(100), nullable=True) - job_description: Mapped[str] = mapped_column(String(500), nullable=True) + description: Mapped[str] = mapped_column(String(500), nullable=True) years_of_experience: Mapped[dict] = mapped_column(JSON, nullable=True) @@ -33,6 +63,15 @@ class Job(Base): 'Skill', secondary=job_skill_table ) + work_model: Mapped[JobWorkModel] = mapped_column( + Enum(JobWorkModel, name='jobworkmodel', + create_constraint=True), + default=JobWorkModel.ON_SITE, + nullable=False + ) + + location: Mapped['Location'] = relationship('Location') + location_id: Mapped[int] = mapped_column(ForeignKey('locations.id'), nullable=True) def __repr__(self): - return f'' + return f'' diff --git a/apps/backend/src/models/location.py b/apps/backend/src/models/location.py new file mode 100644 index 0000000..ec710d7 --- /dev/null +++ b/apps/backend/src/models/location.py @@ -0,0 +1,10 @@ +from sqlalchemy import Enum, String, Integer, ForeignKey, JSON +from sqlalchemy.orm 
import Mapped, mapped_column, relationship +from ..db.base_class import Base + +class Location(Base): + __tablename__ = 'locations' + id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True) + city: Mapped[str] = mapped_column(String, index=True) + state: Mapped[str] = mapped_column(String, index=True) + country: Mapped[str] = mapped_column(String, index=True) diff --git a/apps/backend/src/models/skill.py b/apps/backend/src/models/skill.py index 1dd4c48..97fd996 100644 --- a/apps/backend/src/models/skill.py +++ b/apps/backend/src/models/skill.py @@ -5,7 +5,6 @@ class Skill(Base): __tablename__ = 'skills' name: Mapped[str] = mapped_column(String(255), nullable=False) - proficiency_level: Mapped[int] = mapped_column(Integer, nullable=False) def __repr__(self) -> str: return f'' diff --git a/apps/backend/src/models/user.py b/apps/backend/src/models/user.py index 11d8691..2ade4c5 100644 --- a/apps/backend/src/models/user.py +++ b/apps/backend/src/models/user.py @@ -9,5 +9,6 @@ class User(Base): user_name: Mapped[str | None] = mapped_column(String(100), nullable=True) email: Mapped[str | None] = mapped_column(String(255), nullable=True) signup_key: Mapped[str] = mapped_column(String(255), nullable=False) + hashed_password: Mapped[str] = mapped_column(String(255), nullable=False) def __repr__(self) -> str: return f'' diff --git a/apps/backend/src/schemas/base.py b/apps/backend/src/schemas/base.py new file mode 100644 index 0000000..f70beb9 --- /dev/null +++ b/apps/backend/src/schemas/base.py @@ -0,0 +1,14 @@ +from datetime import datetime +from pydantic import BaseModel, Field + +class BaseSchema(BaseModel): + """ + Base Pydantic schema that includes common fields present in all models. 
+ """ + id: int | None = Field(None, description='Unique identifier') + created_at: datetime | None = Field(None, description='Creation timestamp') + updated_at: datetime | None = Field(None, description='Last update timestamp') + + model_config = { + 'from_attributes': True + } diff --git a/apps/backend/src/schemas/company.py b/apps/backend/src/schemas/company.py index 4da57b4..2eea56b 100644 --- a/apps/backend/src/schemas/company.py +++ b/apps/backend/src/schemas/company.py @@ -1,34 +1,21 @@ from pydantic import BaseModel, Field, EmailStr, HttpUrl -from uuid import UUID -from datetime import datetime -from typing import Optional +from .base import BaseSchema -class CompanyBase(BaseModel): +class Company(BaseModel): name: str = Field(..., max_length=100) website: HttpUrl | None = None email: EmailStr | None = None - location: str size: int | None = None - headquarters: str | None = None - founded_year: int | None = None - website: str | None = None industry: str | None = None description: str | None = None - - -class CompanyCreate(CompanyBase): +class CompanyBase(Company, BaseSchema): pass -class CompanyUpdate(CompanyBase): +class CompanyCreate(Company): pass -class Company(CompanyBase): - id: int - created_at: datetime - updated_at: datetime - - class Config: - from_attributes = True +class CompanyUpdate(Company): + pass diff --git a/apps/backend/src/schemas/job.py b/apps/backend/src/schemas/job.py index 943eafd..d13367a 100644 --- a/apps/backend/src/schemas/job.py +++ b/apps/backend/src/schemas/job.py @@ -3,23 +3,24 @@ from typing import Optional, List from datetime import date -from .company import Company +from .base import BaseSchema +from .company import CompanyBase from .skill import SkillBase class JobStatus(str, Enum): - OPEN = 'Open' - CLOSED = 'Closed' - PENDING = 'Pending' + Open = 'Open' + Closed = 'Closed' + Pending = 'Pending' class JobPosition(str, Enum): - INTERN = 'Intern' - JUNIOR = 'Junior' - MID = 'Mid' - SENIOR = 'Senior' - LEAD = 'Lead' - 
MANAGER = 'Manager' + Intern = 'Intern' + Junior = 'Junior' + Mid = 'Mid' + Senior = 'Senior' + Lead = 'Lead' + Manager = 'Manager' class YearsOfExperience(BaseModel): @@ -27,11 +28,11 @@ class YearsOfExperience(BaseModel): max: Optional[int] = Field(None, ge=0, description='Maximum years of experience') -class JobBase(BaseModel): +class JobBase(BaseSchema): title: str - company: Optional[Company] = None - status: JobStatus = JobStatus.OPEN - position: JobPosition + company: Optional[CompanyBase] = None + status: JobStatus = JobStatus.Open + position: JobPosition = JobPosition.Intern category: Optional[str] = None salary_range: Optional[str] = None required_skills: List[SkillBase] = [] @@ -41,22 +42,32 @@ class JobBase(BaseModel): model_config = {'from_attributes': True} -class JobRead(JobBase): - id: int - created_at: Optional[date] = None - updated_at: Optional[date] = None - - -class JobCreate(JobBase): - pass +class JobCreate(BaseModel): + title: str + company_id: Optional[int] = None + status: JobStatus = JobStatus.Open + position: JobPosition = JobPosition.Intern + category: Optional[str] = None + salary_range: Optional[str] = None + required_skills_ids: List[int] = [] + description: str + years_of_experience: Optional[YearsOfExperience] = None -class JobUpdate(BaseModel): - job_title: Optional[str] = None +class JobUpdate(JobCreate): + title: Optional[str] = None + company_id: Optional[int] = None status: Optional[JobStatus] = None position: Optional[JobPosition] = None category: Optional[str] = None salary_range: Optional[str] = None - required_skills: Optional[List[SkillBase]] = None - job_description: Optional[str] = None + required_skills_ids: Optional[List[int]] = None + description: Optional[str] = None years_of_experience: Optional[YearsOfExperience] = None + + +class JobFilterParams(BaseSchema): + query: Optional[str] = Field(None, description='Search query string') + title: Optional[str] = Field(None, description='Job title') + company: 
Optional[str] = Field(None, description='Company name') + location: Optional[str] = Field(None, description='Job location') diff --git a/apps/backend/src/schemas/skill.py b/apps/backend/src/schemas/skill.py index fa3c67f..d839a9a 100644 --- a/apps/backend/src/schemas/skill.py +++ b/apps/backend/src/schemas/skill.py @@ -1,14 +1,11 @@ from pydantic import BaseModel, Field +from .base import BaseSchema from typing import Optional -class SkillBase(BaseModel): +class Skill(BaseModel): + name: str +class SkillBase(Skill, BaseSchema): name: str - proficiency_level: int = Field(..., ge=1, le=10) - -class SkillCreate(SkillBase): +class SkillCreate(Skill): pass - -class Skill(SkillBase): - class Config: - from_attributes = True diff --git a/apps/backend/src/schemas/user.py b/apps/backend/src/schemas/user.py index c4b8b4e..83e5f13 100644 --- a/apps/backend/src/schemas/user.py +++ b/apps/backend/src/schemas/user.py @@ -1,8 +1,9 @@ +from enum import Enum from pydantic import BaseModel, Field, EmailStr -class UserSignupKey(str): - USER_NAME = 'username' - EMAIL = 'email' +class UserSignupKey(str, Enum): + USER_NAME = 'USER_NAME' + EMAIL = 'EMAIL' class UserBase(BaseModel): first_name: str diff --git a/apps/backend/src/services/company.py b/apps/backend/src/services/company.py new file mode 100644 index 0000000..2186d2a --- /dev/null +++ b/apps/backend/src/services/company.py @@ -0,0 +1,87 @@ +from sqlalchemy.orm import Session +from pydantic import HttpUrl +from fastapi import HTTPException +from ..models.company import Company +from ..schemas.company import CompanyBase, CompanyCreate, CompanyUpdate +from ..api.deps.pagination import build_paginated_response, get_paginated_response_model, paginate_query + +PaginatedCompanies = get_paginated_response_model(CompanyBase) + +def get_companies(db: Session, pagination: dict) -> PaginatedCompanies: + q = db.query(Company) + + total = q.count() + q = paginate_query(q, pagination) + + results = q.all() + + companies = 
[CompanyBase.model_validate(company) for company in results] + + return build_paginated_response( + items=companies, + total=total, + **pagination + ) + +# def get_job(db: Session, job_id: int) -> JobBase | None: +# db_job = db.query(Job).get(job_id) +# if db_job: +# return JobBase.model_validate(db_job) +# return None + +# def delete_job(db: Session, job_id: int) -> bool: +# db_job = db.query(Job).get(job_id) +# if not db_job: +# return False +# db.delete(db_job) +# db.commit() +# return True + +# def get_tranformed_job(db: Session, job_in: JobCreate | JobUpdate) -> dict: +# data = job_in.model_dump(exclude_unset=True) + +# # Replace skill IDs with actual Skill objects +# if job_in.required_skills_ids: +# data['required_skills'] = db.query(Skill).filter(Skill.id.in_(job_in.required_skills_ids)).all() +# data.pop('required_skills_ids', None) + +# # Replace company_id with actual Company object +# if job_in.company_id: +# company = db.query(Company).get(job_in.company_id) +# if company: +# data['company'] = company + +# return data + + +def create_company(db: Session, company_data: CompanyCreate) -> CompanyBase: + existing_companies_with_name = db.query(Company).filter(Company.name == company_data.name).all() + if len(existing_companies_with_name) > 0: + raise HTTPException(status_code=409, detail='Company already exists') + company_data = Company( + **company_data.model_dump(exclude_unset=True) + ) + + if isinstance(company_data.website, HttpUrl): + company_data.website = str(company_data.website) + + db.add(company_data) + db.commit() + db.refresh(company_data) + + return CompanyBase.model_validate(company_data) + + +# def update_job(db: Session, job_id: int, job_in: JobUpdate) -> JobBase | None: +# db_job = db.query(Job).filter(Job.id == job_id).first() +# if not db_job: +# return None + +# job_data = get_tranformed_job(db, job_in) + +# for key, value in job_data.items(): +# setattr(db_job, key, value) + +# db.commit() +# db.refresh(db_job) +# return 
JobBase.model_validate(db_job) diff --git a/apps/backend/src/services/job.py b/apps/backend/src/services/job.py index 336e2b9..bcc4922 100644 --- a/apps/backend/src/services/job.py +++ b/apps/backend/src/services/job.py @@ -1,32 +1,96 @@ from typing import Type from sqlalchemy.orm import Session from ..models.job import Job -from ..schemas.job import JobBase, JobRead +from ..models.skill import Skill +from ..models.company import Company +from ..schemas.job import JobBase, JobUpdate, JobCreate, JobFilterParams from ..api.deps.pagination import build_paginated_response, get_paginated_response_model, paginate_query -# allowed_job_query_fields = [JobBase.title, JobBase.description, JobBase.company] - PaginatedJobs = get_paginated_response_model(JobBase) -def get_jobs(db: Session, pagination: dict, query: str | None = None) -> PaginatedJobs: - # query maybe like this query="company: Acme Inc,location: Remote" +def get_jobs(db: Session, pagination: dict, filter: JobFilterParams | None = None) -> PaginatedJobs: q = db.query(Job) - # Optional search filter - # if query: - # filters = [getattr(Job, field).ilike(f"%{query}%") for field in allowed_job_query_fields] - # q = q.filter(or_(*filters)) + if filter.title: + q = q.filter(Job.title.ilike(f'%{filter.title}%')) + if filter.company: + q = q.filter(Job.company.ilike(f'%{filter.company}%')) + if filter.location: + q = q.filter(Job.location.ilike(f'%{filter.location}%')) + if filter.query: + q = q.filter( + Job.title.ilike(f'%{filter.query}%') | + Job.description.ilike(f'%{filter.query}%') + ) + - total = q.count() # total before pagination + total = q.count() q = paginate_query(q, pagination) results = q.all() - # convert SQLAlchemy models to Pydantic - jobs = [JobRead.from_orm(job) for job in results] + jobs = [JobBase.model_validate(job) for job in results] return build_paginated_response( items=jobs, total=total, **pagination ) + +def get_job(db: Session, job_id: int) -> JobBase | None: + db_job = 
db.query(Job).get(job_id) + if db_job: + return JobBase.model_validate(db_job) + return None + +def delete_job(db: Session, job_id: int) -> bool: + db_job = db.query(Job).get(job_id) + if not db_job: + return False + db.delete(db_job) + db.commit() + return True + +def get_tranformed_job(db: Session, job_in: JobCreate | JobUpdate) -> dict: + data = job_in.model_dump(exclude_unset=True) + + # Replace skill IDs with actual Skill objects + if job_in.required_skills_ids: + data['required_skills'] = db.query(Skill).filter(Skill.id.in_(job_in.required_skills_ids)).all() + data.pop('required_skills_ids', None) + + # Replace company_id with actual Company object + if job_in.company_id: + company = db.query(Company).get(job_in.company_id) + if company: + data['company'] = company + del data['company_id'] + + return data + + +def create_job(db: Session, job_in: JobCreate) -> JobBase: + job_data = get_tranformed_job(db, job_in) + + db_job = Job(**job_data) + + db.add(db_job) + db.commit() + db.refresh(db_job) + + return JobBase.model_validate(db_job) + + +def update_job(db: Session, job_id: int, job_in: JobUpdate) -> JobBase | None: + db_job = db.query(Job).filter(Job.id == job_id).first() + if not db_job: + return None + + job_data = get_tranformed_job(db, job_in) + + for key, value in job_data.items(): + setattr(db_job, key, value) + + db.commit() + db.refresh(db_job) + return JobBase.model_validate(db_job) From be2145a21c11ac9b4151fb4f733d20af2d75a869 Mon Sep 17 00:00:00 2001 From: existentialcoder Date: Sun, 9 Nov 2025 19:12:25 +0100 Subject: [PATCH 2/2] Clean up hackathon files --- .github/workflows/verify.yml | 47 ----- .hackathon-start | 1 - verify-submission.js | 362 ----------------------------------- 3 files changed, 410 deletions(-) delete mode 100644 .github/workflows/verify.yml delete mode 100644 .hackathon-start delete mode 100644 verify-submission.js diff --git a/.github/workflows/verify.yml b/.github/workflows/verify.yml deleted file mode 100644 index 
7bbd4e0..0000000 --- a/.github/workflows/verify.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Verify Hackathon Submission - -on: - push: - branches: [ main, master ] - pull_request: - branches: [ main, master ] - workflow_dispatch: - -jobs: - verify: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v3 - with: - fetch-depth: 0 # Fetch all history for commit verification - - - name: Setup Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Run verification script - run: node verify-submission.js - continue-on-error: true - - - name: Check for timestamp file - run: | - if [ -f .hackathon-start ]; then - echo "āœ… Timestamp file found" - cat .hackathon-start - else - echo "āš ļø Timestamp file not found - please create it!" - exit 0 - fi - - - name: Commit count - run: | - COMMIT_COUNT=$(git log --oneline --all --grep="acta-hackathon-setup" --invert-grep | wc -l | xargs) - echo "Participant commits: $COMMIT_COUNT (excluding acta-hackathon-setup)" - if [ $COMMIT_COUNT -ge 5 ]; then - echo "āœ… Minimum commit count met" - else - echo "āš ļø Consider making more commits (current: $COMMIT_COUNT, recommended: 5+)" - fi diff --git a/.hackathon-start b/.hackathon-start deleted file mode 100644 index 4dcdde1..0000000 --- a/.hackathon-start +++ /dev/null @@ -1 +0,0 @@ -Sat Oct 4 12:45:38 CEST 2025 diff --git a/verify-submission.js b/verify-submission.js deleted file mode 100644 index 8b7ad37..0000000 --- a/verify-submission.js +++ /dev/null @@ -1,362 +0,0 @@ -#!/usr/bin/env node - -/** - * ACTA Hackathon Submission Verification Script - * - * This script verifies that your submission meets all requirements - * and helps ensure you won't be flagged for anti-cheating violations. 
- * - * Run before submitting: node verify-submission.js - */ - -const { execSync } = require('child_process'); -const fs = require('fs'); -const path = require('path'); - -// Hackathon timing (CET timezone) -const HACKATHON_START = new Date('2025-10-04T12:00:00+02:00'); // Oct 4, 12:00 CET -const HACKATHON_END = new Date('2025-10-05T12:00:00+02:00'); // Oct 5, 12:00 CET - -const MINIMUM_COMMITS = 5; -const INITIAL_TIMESTAMP_FILE = '.hackathon-start'; - -// Colors for terminal output -const colors = { - reset: '\x1b[0m', - bright: '\x1b[1m', - red: '\x1b[31m', - green: '\x1b[32m', - yellow: '\x1b[33m', - blue: '\x1b[34m', - magenta: '\x1b[35m' -}; - -function log(message, color = colors.reset) { - console.log(`${color}${message}${colors.reset}`); -} - -function header(message) { - log(`\n${'='.repeat(60)}`, colors.bright); - log(message, colors.bright + colors.blue); - log('='.repeat(60), colors.bright); -} - -function checkmark(passed, message) { - const symbol = passed ? 'āœ…' : 'āŒ'; - const color = passed ? 
colors.green : colors.red; - log(`${symbol} ${message}`, color); - return passed; -} - -function warning(message) { - log(`āš ļø ${message}`, colors.yellow); -} - -function getGitCommits() { - try { - const output = execSync('git log --pretty=format:"%H|%aI|%s"', { encoding: 'utf-8' }); - return output.split('\n') - .filter(line => line.trim()) - .map(line => { - const [hash, date, message] = line.split('|'); - return { - hash, - date: new Date(date), - message - }; - }) - .filter(commit => commit.message !== 'acta-hackathon-setup') // Exclude setup commit - .reverse(); // Chronological order - } catch (error) { - return []; - } -} - -function checkInitialTimestamp() { - header('Checking Initial Timestamp'); - - const timestampPath = path.join(process.cwd(), INITIAL_TIMESTAMP_FILE); - const exists = fs.existsSync(timestampPath); - - if (!checkmark(exists, `Initial timestamp file (${INITIAL_TIMESTAMP_FILE}) exists`)) { - log('\n Create it now with:', colors.yellow); - log(` date > ${INITIAL_TIMESTAMP_FILE}`, colors.yellow); - log(` git add ${INITIAL_TIMESTAMP_FILE}`, colors.yellow); - log(` git commit -m "šŸŽÆ Starting ACTA Hackathon - $(date)"`, colors.yellow); - return false; - } - - try { - const content = fs.readFileSync(timestampPath, 'utf-8'); - log(`\n Timestamp content: ${content.trim()}`, colors.blue); - } catch (error) { - warning('Could not read timestamp file content'); - } - - return true; -} - -function checkCommitTiming(commits) { - header('Checking Commit Timing'); - - if (commits.length === 0) { - checkmark(false, 'Found commits in repository'); - log('\n Make sure you have committed your code!', colors.yellow); - log(' (Note: "acta-hackathon-setup" commit is excluded from checks)', colors.blue); - return false; - } - - checkmark(true, `Found ${commits.length} participant commits`); - - const firstCommit = commits[0]; - const lastCommit = commits[commits.length - 1]; - - log(`\n First commit: ${firstCommit.date.toISOString()}`, colors.blue); - 
log(` Last commit: ${lastCommit.date.toISOString()}`, colors.blue); - - const startedOnTime = firstCommit.date >= HACKATHON_START; - const finishedOnTime = lastCommit.date <= HACKATHON_END; - - checkmark( - startedOnTime, - `First commit after hackathon start (Oct 4, 12:00 CET)` - ); - - if (!startedOnTime) { - const diff = Math.abs(firstCommit.date - HACKATHON_START) / 1000 / 60; - warning(`First commit was ${diff.toFixed(0)} minutes too early!`); - } - - checkmark( - finishedOnTime, - `Last commit before deadline (Oct 5, 12:00 CET)` - ); - - if (!finishedOnTime) { - const diff = (lastCommit.date - HACKATHON_END) / 1000 / 60; - warning(`Last commit was ${diff.toFixed(0)} minutes too late!`); - } - - return startedOnTime && finishedOnTime; -} - -function checkCommitFrequency(commits) { - header('Checking Commit Frequency'); - - // Filter to only commits during the hackathon window - const validCommits = commits.filter(c => - c.date >= HACKATHON_START && c.date <= HACKATHON_END - ); - - checkmark( - validCommits.length >= MINIMUM_COMMITS, - `At least ${MINIMUM_COMMITS} commits during hackathon (found ${validCommits.length})` - ); - - if (validCommits.length < MINIMUM_COMMITS) { - warning(`Make more commits! 
You need ${MINIMUM_COMMITS - validCommits.length} more.`); - return false; - } - - // Check commit distribution - const timeSpan = HACKATHON_END - HACKATHON_START; - const quarterSpan = timeSpan / 4; - - const quarters = [0, 1, 2, 3].map(q => { - const start = new Date(HACKATHON_START.getTime() + q * quarterSpan); - const end = new Date(HACKATHON_START.getTime() + (q + 1) * quarterSpan); - return validCommits.filter(c => c.date >= start && c.date < end).length; - }); - - const emptyQuarters = quarters.filter(c => c === 0).length; - - if (emptyQuarters <= 1) { - checkmark(true, 'Commits well distributed throughout hackathon'); - } else { - warning('Commits are bunched together - this may look suspicious'); - log(' Consider committing more regularly throughout the event', colors.yellow); - } - - // Check for suspicious single massive commit - if (validCommits.length === 1) { - warning('Only 1 commit found - this will likely be flagged!'); - log(' Make multiple commits as you build features', colors.yellow); - return false; - } - - return true; -} - -function checkCommitMessages(commits) { - header('Checking Commit Messages'); - - // Only check commits during the hackathon window - const validCommits = commits.filter(c => - c.date >= HACKATHON_START && c.date <= HACKATHON_END - ); - - if (validCommits.length === 0) { - warning('No commits found during hackathon window'); - return false; - } - - const poorMessages = validCommits.filter(c => - c.message.length < 5 || - /^(wip|fix|update|stuff|temp|test)$/i.test(c.message.trim()) - ); - - if (poorMessages.length > validCommits.length / 2) { - warning('Many commits have poor quality messages'); - log(' Good commit messages help demonstrate your progress', colors.yellow); - } else { - checkmark(true, 'Commit messages look reasonable'); - } - - log('\n Hackathon commits:', colors.blue); - validCommits.slice(-5).forEach(c => { - log(` ${c.date.toLocaleString('en-US', { timeZone: 'Europe/Paris' })} - ${c.message}`, 
colors.reset); - }); - - return true; -} - -function checkGitHistory() { - header('Checking Git History Integrity'); - - try { - // Check for force pushes or history rewrites - execSync('git reflog --all', { encoding: 'utf-8' }); - checkmark(true, 'Git history appears intact'); - - // Check for suspicious operations - const reflog = execSync('git reflog', { encoding: 'utf-8' }); - const hasRebase = reflog.includes('rebase'); - const hasReset = reflog.includes('reset'); - - if (hasRebase || hasReset) { - warning('Detected rebase or reset operations'); - log(' If you had a good reason, be prepared to explain this', colors.yellow); - } - - return true; - } catch (error) { - warning('Could not verify git history'); - return false; - } -} - -function checkProjectFiles() { - header('Checking Project Files'); - - const hasReadme = fs.existsSync('README.md'); - checkmark(hasReadme, 'README.md exists'); - - if (hasReadme) { - const readme = fs.readFileSync('README.md', 'utf-8'); - const hasProjectInfo = readme.length > 1000 && !readme.includes('[SUBMISSION LINK TO BE ADDED]'); - - if (hasProjectInfo) { - checkmark(true, 'README appears to be updated with project info'); - } else { - warning('README might still be the template'); - log(' Update it with your project description!', colors.yellow); - } - } - - // Check for common project files - const files = fs.readdirSync(process.cwd()); - const hasCode = files.some(f => - f.endsWith('.js') || - f.endsWith('.ts') || - f.endsWith('.py') || - f.endsWith('.html') || - ['src', 'app', 'lib', 'pages'].includes(f) - ); - - checkmark(hasCode, 'Project code files detected'); - - return hasReadme && hasCode; -} - -function generateReport(results) { - header('VERIFICATION REPORT'); - - const allPassed = Object.values(results).every(v => v); - - if (allPassed) { - log('\nšŸŽ‰ All checks passed! 
Your submission looks good.', colors.green + colors.bright); - log('\nšŸ“‹ SUBMISSION REQUIREMENTS:', colors.blue + colors.bright); - log('Before submitting via https://forms.acta.so/r/wMobdM, ensure you have:', colors.blue); - log(''); - log(' 1. āœ… Public GitHub repository URL', colors.green); - log(' 2. āœ… 60-second demo video (Loom, YouTube) - MUST BE PUBLIC', colors.green); - log(' 3. āœ… Live public demo URL (deployed app)', colors.green); - log(' 4. āœ… Your email address', colors.green); - log(' 5. āœ… Your name', colors.green); - log(''); - log('āš ļø CRITICAL: All links must be PUBLIC and working!', colors.yellow + colors.bright); - log(' - Test your GitHub repo in an incognito window'); - log(' - Watch your video to ensure it plays publicly'); - log(' - Open your deployed app in incognito to verify it works'); - log(''); - log('šŸ“ Next steps:', colors.blue); - log('1. Deploy your app (see DEPLOYMENT_GUIDE.md)'); - log('2. Record your 60s demo video'); - log('3. Make GitHub repo PUBLIC (Settings → Change visibility)'); - log('4. Test all links in incognito/private browsing'); - log('5. Submit: https://forms.acta.so/r/wMobdM'); - log('6. Join Discord for winner announcements'); - } else { - log('\nāš ļø Some checks failed. Please fix the issues above.', colors.red + colors.bright); - log('\nFailed checks:', colors.red); - Object.entries(results).forEach(([check, passed]) => { - if (!passed) { - log(` āŒ ${check}`, colors.red); - } - }); - } - - log('\n' + '='.repeat(60), colors.bright); - log('šŸ† Submission: https://forms.acta.so/r/wMobdM', colors.magenta + colors.bright); - log('ā“ Questions? 
Discord or DM @acta.so on Instagram', colors.blue); - log('='.repeat(60) + '\n', colors.bright); - - return allPassed; -} - -// Main execution -function main() { - log('\n'); - log(' ā–ˆā–ˆā–ˆā–ˆā–ˆā•— ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•—ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•— ā–ˆā–ˆā–ˆā–ˆā–ˆā•— ā–ˆā–ˆā•— ā–ˆā–ˆā•— ā–ˆā–ˆā–ˆā–ˆā–ˆā•— ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•—ā–ˆā–ˆā•— ā–ˆā–ˆā•—', colors.magenta); - log(' ā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•—ā–ˆā–ˆā•”ā•ā•ā•ā•ā•ā•šā•ā•ā–ˆā–ˆā•”ā•ā•ā•ā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•— ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•—ā–ˆā–ˆā•”ā•ā•ā•ā•ā•ā–ˆā–ˆā•‘ ā–ˆā–ˆā•”ā•', colors.magenta); - log(' ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘ ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•‘ā–ˆā–ˆā•‘ ā–ˆā–ˆā–ˆā–ˆā–ˆā•”ā• ', colors.magenta); - log(' ā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ ā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘ ā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘ā–ˆā–ˆā•”ā•ā•ā–ˆā–ˆā•‘ā–ˆā–ˆā•‘ ā–ˆā–ˆā•”ā•ā–ˆā–ˆā•— ', colors.magenta); - log(' ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ā•šā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•— ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ā–ˆā–ˆā•‘ ā–ˆā–ˆā•‘ā•šā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā•—ā–ˆā–ˆā•‘ ā–ˆā–ˆā•—', colors.magenta); - log(' ā•šā•ā• ā•šā•ā• ā•šā•ā•ā•ā•ā•ā• ā•šā•ā• ā•šā•ā• ā•šā•ā• ā•šā•ā• ā•šā•ā•ā•šā•ā• ā•šā•ā• ā•šā•ā•ā•ā•ā•ā•ā•šā•ā• ā•šā•ā•', colors.magenta); - log('\n Submission Verification Tool - 24h Hackathon\n', colors.bright); - - const commits = getGitCommits(); - - const results = { - 'Initial Timestamp': checkInitialTimestamp(), - 'Commit Timing': checkCommitTiming(commits), - 'Commit Frequency': checkCommitFrequency(commits), - 'Commit Messages': checkCommitMessages(commits), - 'Git History': checkGitHistory(), - 'Project Files': checkProjectFiles() - }; - - const allPassed = generateReport(results); - - process.exit(allPassed ? 
0 : 1); -} - -// Check if git is available -try { - execSync('git --version', { encoding: 'utf-8' }); - main(); -} catch (error) { - log('āŒ Git is not installed or not available', colors.red); - log(' Please install Git and ensure this is a Git repository', colors.yellow); - process.exit(1); -}