diff --git a/.env.example b/.env.example index df65d36..66ad7c6 100644 --- a/.env.example +++ b/.env.example @@ -24,6 +24,9 @@ CORS_ORIGINS=http://localhost:3000 # Hytale Staging OAuth Credentials (optional, required for Hytale features) HYTALE_USE_STAGING=false +# JWT Secret (REQUIRED - for JWT token generation) +JWT_SECRET="your-jwt-secret-here" + # Sentry Error Tracking (optional) # DSN from: https://console.sentry.io/ # SENTRY_DSN=https://key@sentry.io/project diff --git a/CHANGELOG.md b/CHANGELOG.md index 5be46a6..e48ca97 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,29 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [0.2.2] - unreleased ### Added +- **Unified Database CLI Tool** - Consolidated shell scripts into cross-platform Go CLI + - Supports Windows, macOS, and Linux without native shell dependencies + - `db init` - Initialize database schema with interactive schema selection + - `db migrate` - Run specific schema migrations with validation + - `db reset` - Complete database reset with confirmation prompt + - `db list` - Display all available schema files with status + - Makefile integration with environment variable loading from `.env` +- **Consolidated Next.js API Routes to Go Backend** - Complete migration of frontend API routes to backend + - Moved all `/api/admin/` endpoints from Next.js to Go Fiber backend + - Admin user management: `GET/POST /api/admin/users`, `POST /api/admin/users/roles` + - Admin settings: `GET/POST /api/admin/settings`, `POST /api/admin/settings/test` + - GitHub repositories: `GET/POST/PUT/DELETE /api/admin/settings/repos` + - Discord webhooks: `GET/POST/PUT/PATCH/DELETE /api/admin/settings/webhooks` + - Admin sync controls: `GET/POST /api/admin/sync`, `GET /api/admin/sync/logs`, `GET/POST /api/admin/sync/settings` + - Admin servers: `GET /api/admin/servers` + - Bearer token authentication middleware for all admin routes + - Consistent error response format across all 
endpoints +- **Admin User Management API** - Complete user listing and management endpoints + - `GET /api/admin/users` - Paginated user listing with filtering, sorting, search + - Query parameters: page, pageSize, sortField, sortOrder, filter, search + - Returns paginated response with user data and statistics + - `POST /api/admin/users/roles` - Update user role assignments + - User statistics: totalUsers, migratedUsers, adminCount, activeCount - **Hytale Token Auto-Push to Pterodactyl** - Automatic environment variable updates for game servers - Game sessions can be linked to specific Pterodactyl servers via `server_id` field - Background worker automatically pushes refreshed tokens to Pterodactyl every 5 minutes @@ -18,6 +41,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Graceful degradation: Logs warnings but continues if Pterodactyl push fails ### Fixed +- **Admin Users Data Type Handling** - Fixed TIMESTAMP column scanning from PostgreSQL + - Changed timestamp handling to use `time.Time` objects with RFC3339 formatting + - Properly convert database TIMESTAMP columns to ISO 8601 string format in API responses + - Fixed empty users array issue in admin panel users listing + - Proper null pointer handling for nullable timestamp fields (`lastLoginAt`, `emailVerifiedTime`) - **Server-Allocation Relationship Sync** - Fixed missing foreign key population - Added `Relationships` field to `PteroServer` struct to capture included allocations from API - `syncServers()` now properly updates `server_id` foreign key in `allocations` table diff --git a/Makefile b/Makefile index bdc9912..41460c5 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,12 @@ # NodeByte Backend Makefile # Usage: make [target] +# Load environment variables from .env file +ifneq (,$(wildcard .env)) + include .env + export $(shell sed 's/=.*//' .env) +endif + # Variables BINARY_NAME=nodebyte-backend MAIN_PATH=./cmd/api @@ -18,7 +24,7 @@ YELLOW= RED= NC= -.PHONY: 
all build run dev clean test lint fmt vet deps tidy docker-build docker-up docker-down docker-logs swagger help +.PHONY: all build build-tools run dev clean test lint fmt vet deps tidy docker-build docker-up docker-down docker-logs swagger help db-init db-migrate db-reset db-list # Default target all: build @@ -42,6 +48,13 @@ build-all: GOOS=darwin GOARCH=arm64 $(GO) build $(GOFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-darwin-arm64 $(MAIN_PATH) @echo "All builds complete" +# Build database tools +build-tools: + @echo "Building database tools..." + @mkdir -p "$(BUILD_DIR)" + $(GO) build $(GOFLAGS) -o $(BUILD_DIR)/db ./cmd/db + @echo "Database tools built: $(BUILD_DIR)/db" + ## Run Commands # Run the application @@ -169,6 +182,34 @@ docker-clean: ## Database Commands +# Build database migration tools +build-db-tools: build-tools + +# Initialize fresh database with all schemas +db-init: build-tools + @echo "Initializing database..." + $(BUILD_DIR)/db init -database "$(DATABASE_URL)" + +# Run interactive migration +db-migrate: build-tools + @echo "Running database migration..." + $(BUILD_DIR)/db migrate -database "$(DATABASE_URL)" + +# Migrate specific schema +db-migrate-schema: build-tools + $(if $(SCHEMA),,$(error Usage: make db-migrate-schema SCHEMA=schema_name.sql)) + @echo "Migrating $(SCHEMA)..." + $(BUILD_DIR)/db migrate -database "$(DATABASE_URL)" -schema "$(SCHEMA)" + +# Reset database (DROP and recreate) - CAREFUL! +db-reset: build-tools + @echo "WARNING: This will DROP and recreate the database!" + $(BUILD_DIR)/db reset -database "$(DATABASE_URL)" + +# List available schemas +db-list: build-tools + $(BUILD_DIR)/db list + # Generate sqlc code (if using sqlc) sqlc: @echo "Generating sqlc code..." 
@@ -207,6 +251,7 @@ help: @echo "Build:" @echo " make build - Build the application" @echo " make build-all - Build for all platforms" + @echo " make build-tools - Build database tools" @echo "" @echo "Run:" @echo " make run - Build and run" @@ -222,6 +267,13 @@ help: @echo " make vet - Run go vet" @echo " make check - Run all checks" @echo "" + @echo "Database:" + @echo " make db-init - Initialize fresh database" + @echo " make db-migrate - Run interactive migration" + @echo " make db-migrate-schema SCHEMA=schema_name.sql - Migrate specific schema" + @echo " make db-reset - Reset database (DROP and recreate)" + @echo " make db-list - List available schemas" + @echo "" @echo "Dependencies:" @echo " make deps - Download dependencies" @echo " make tidy - Tidy dependencies" diff --git a/README.md b/README.md index b8262e2..992695b 100644 --- a/README.md +++ b/README.md @@ -707,6 +707,142 @@ docker-compose logs -f backend - Tests for business logic and API handlers - Code must pass: `gofmt`, `go vet`, `golangci-lint` +# Database Tools - Quick Reference + +## One-Time Setup + +```bash +cd backend +make build-tools +``` + +## Common Commands + +### Fresh Database +```bash +make db-init +``` + +### Add New Schemas (Interactive) +```bash +make db-migrate +# Then select schema numbers from menu (e.g., 14,15) +``` + +### Single Schema +```bash +make db-migrate-schema SCHEMA=schema_15_careers.sql +``` + +### Start Fresh +```bash +make db-reset +# Confirm by typing database name +``` + +### See Available Schemas +```bash +make db-list +``` + +## With Environment Variable + +```bash +export DATABASE_URL="postgresql://user:password@localhost:5432/nodebyte" + +# Then use commands normally +make db-init +make db-migrate +make db-reset +``` + +## Direct Binary Usage + +```bash +# All commands also work with the binary directly +./bin/db init -database "postgresql://user:password@localhost:5432/nodebyte" +./bin/db migrate -database 
"postgresql://user:password@localhost:5432/nodebyte" +./bin/db migrate -database "..." -schema schema_15_careers.sql +./bin/db reset -database "postgresql://user:password@localhost:5432/nodebyte" +./bin/db list +./bin/db help +``` + +## Development Workflow + +```bash +# Start fresh +make db-reset +# Confirm: nodebyte +# ✅ Database is now reset and initialized + +# Make changes, run tests +# ... + +# Add new schema during development +make db-migrate-schema SCHEMA=schema_16_new_feature.sql + +# Or choose from menu +make db-migrate +``` + +## Makefile Targets + +``` +db-init # Initialize fresh database +db-migrate # Interactive schema selection +db-migrate-schema # Migrate specific schema (SCHEMA=name) +db-reset # Drop and recreate database +db-list # List available schemas +build-tools # Build database tool +``` + +## Common Issues + +**Tool not found?** +```bash +make build-tools +``` + +**Wrong database connected?** +```bash +export DATABASE_URL="postgresql://user:password@correct-host/correct-db" +make db-migrate +``` + +**Need to start over?** +```bash +make db-reset +# Type database name to confirm +# Database is now fresh with all 15 schemas +``` + +## Environment Variables + +| Variable | Purpose | Default | +|----------|---------|---------| +| `DATABASE_URL` | PostgreSQL connection string | (none) | +| `SCHEMA` | Used with `db-migrate-schema` | (none) | + +## More Information + +- **Full Guide**: See `DATABASE_TOOLS.md` +- **Implementation**: See `DATABASE_IMPLEMENTATION.md` +- **Schema Details**: See `schemas/README.md` + +--- + +**Quick Test:** +```bash +make build-tools && make db-list +``` + +**Help:** +```bash +make help +./bin/db help +``` + ### Pre-Commit Checks Before pushing, ensure code passes all checks: diff --git a/cmd/api/main.go b/cmd/api/main.go index e5d6f41..57d4942 100644 --- a/cmd/api/main.go +++ b/cmd/api/main.go @@ -140,9 +140,10 @@ func main() { Format: "[${time}] ${status} - ${latency} ${method} ${path}\n", })) 
app.Use(cors.New(cors.Config{ - AllowOrigins: cfg.CORSOrigins, - AllowHeaders: "Origin, Content-Type, Accept, Authorization, X-API-Key", - AllowMethods: "GET, POST, PUT, DELETE, OPTIONS", + AllowOrigins: cfg.CORSOrigins, + AllowHeaders: "Origin, Content-Type, Accept, Authorization, X-API-Key", + AllowMethods: "GET, POST, PUT, DELETE, OPTIONS, PATCH", + AllowCredentials: true, })) // API key middleware for protected routes diff --git a/cmd/db/main.go b/cmd/db/main.go new file mode 100644 index 0000000..9f29580 --- /dev/null +++ b/cmd/db/main.go @@ -0,0 +1,413 @@ +package main + +import ( + "bufio" + "context" + "flag" + "fmt" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/jackc/pgx/v5" +) + +var schemas = []string{ + "schema_01_users_auth.sql", + "schema_02_pterodactyl_sync.sql", + "schema_03_servers.sql", + "schema_04_billing.sql", + "schema_05_support_tickets.sql", + "schema_06_discord_webhooks.sql", + "schema_07_sync_logs.sql", + "schema_08_config.sql", + "schema_09_hytale.sql", + "schema_10_hytale_audit.sql", + "schema_11_hytale_server_logs.sql", + "schema_12_server_subusers.sql", + "schema_13_hytale_server_link.sql", + "schema_14_partners.sql", + "schema_15_careers.sql", +} + +func main() { + if len(os.Args) < 2 { + printUsage() + os.Exit(1) + } + + command := os.Args[1] + args := os.Args[2:] + + switch command { + case "init": + cmdInit(args) + case "migrate": + cmdMigrate(args) + case "reset": + cmdReset(args) + case "list": + cmdList() + case "help": + printUsage() + default: + fmt.Printf("❌ Unknown command: %s\n", command) + fmt.Println() + printUsage() + os.Exit(1) + } +} + +func printUsage() { + fmt.Print(`NodeByte Database Tool + +Usage: db [options] + +Commands: + init Initialize a fresh database with all schemas + migrate Migrate specific or all schemas to an existing database + reset Reset database (DROP and recreate) - USE WITH CAUTION + list List all available schemas + help Show this help message + +Examples: + # Initialize fresh 
database + db init -database "postgresql://user:pass@localhost/nodebyte" + + # Migrate all schemas + db migrate -database "postgresql://user:pass@localhost/nodebyte" + + # Migrate specific schema + db migrate -database "postgresql://user:pass@localhost/nodebyte" -schema schema_14_partners.sql + + # Reset database (destructive - requires confirmation) + db reset -database "postgresql://user:pass@localhost/nodebyte" + +Environment Variables: + DATABASE_URL - Default database connection string +`) +} + +func cmdInit(args []string) { + fs := flag.NewFlagSet("init", flag.ExitOnError) + databaseURL := fs.String("database", os.Getenv("DATABASE_URL"), "PostgreSQL connection string") + fs.Parse(args) + + if *databaseURL == "" { + fmt.Println("❌ Error: No database URL provided") + fmt.Println("Use: db init -database ") + os.Exit(1) + } + + fmt.Println("============================================================================") + fmt.Println("NodeByte Database Initialization") + fmt.Println("============================================================================") + fmt.Println() + + conn, err := pgx.Connect(context.Background(), *databaseURL) + if err != nil { + fmt.Printf("❌ Error connecting to database: %v\n", err) + os.Exit(1) + } + defer conn.Close(context.Background()) + + fmt.Println("✅ Connected to database") + fmt.Println() + fmt.Printf("📦 Initializing database with %d schemas...\n", len(schemas)) + fmt.Println() + + schemasDir := findSchemasDir() + if schemasDir == "" { + fmt.Println("❌ Error: schemas directory not found") + os.Exit(1) + } + + for _, schema := range schemas { + if err := migrateSchema(conn, schemasDir, schema); err != nil { + fmt.Printf("❌ %s\n Error: %v\n", schema, err) + os.Exit(1) + } + fmt.Printf("✅ %s\n", schema) + } + + fmt.Println() + fmt.Println("============================================================================") + fmt.Println("✅ Database initialization complete!") + 
fmt.Println("============================================================================") + fmt.Println() +} + +func cmdMigrate(args []string) { + fs := flag.NewFlagSet("migrate", flag.ExitOnError) + databaseURL := fs.String("database", os.Getenv("DATABASE_URL"), "PostgreSQL connection string") + schemaFile := fs.String("schema", "", "Specific schema file to migrate (optional)") + fs.Parse(args) + + if *databaseURL == "" { + fmt.Println("❌ Error: No database URL provided") + fmt.Println("Use: db migrate -database [-schema ]") + os.Exit(1) + } + + conn, err := pgx.Connect(context.Background(), *databaseURL) + if err != nil { + fmt.Printf("❌ Error connecting to database: %v\n", err) + os.Exit(1) + } + defer conn.Close(context.Background()) + + fmt.Println("✅ Connected to database") + fmt.Println() + + schemasDir := findSchemasDir() + if schemasDir == "" { + fmt.Println("❌ Error: schemas directory not found") + os.Exit(1) + } + + if *schemaFile != "" { + // Migrate specific schema + fmt.Printf("📦 Migrating: %s\n", *schemaFile) + fmt.Println() + if err := migrateSchema(conn, schemasDir, *schemaFile); err != nil { + fmt.Printf("❌ Failed to migrate: %s\n", *schemaFile) + fmt.Printf(" Error: %v\n", err) + os.Exit(1) + } + fmt.Printf("✅ Successfully migrated: %s\n", *schemaFile) + } else { + // Interactive menu + printMenu() + selection := readSelection() + + if selection == "0" { + fmt.Println("Exiting...") + return + } + + if selection == "" { + // Migrate all + fmt.Println("Migrating all schemas...") + fmt.Println() + migrateAll(conn, schemasDir) + } else { + // Migrate selected + migrateSelected(conn, schemasDir, selection) + } + + fmt.Println() + fmt.Println("============================================================================") + fmt.Println("✅ Migration complete") + fmt.Println("============================================================================") + fmt.Println() + } +} + +func cmdReset(args []string) { + fs := flag.NewFlagSet("reset", 
flag.ExitOnError) + databaseURL := fs.String("database", os.Getenv("DATABASE_URL"), "PostgreSQL connection string") + fs.Parse(args) + + if *databaseURL == "" { + fmt.Println("❌ Error: No database URL provided") + fmt.Println("Use: db reset -database ") + os.Exit(1) + } + + // Parse database name from connection string + connConfig, err := pgx.ParseConfig(*databaseURL) + if err != nil { + fmt.Printf("❌ Error parsing database URL: %v\n", err) + os.Exit(1) + } + + dbName := connConfig.Database + + // Confirm before resetting + fmt.Printf("⚠️ WARNING: This will DROP and recreate the database '%s'\n", dbName) + fmt.Print("Are you SURE? Type the database name to confirm: ") + reader := bufio.NewReader(os.Stdin) + input, _ := reader.ReadString('\n') + input = strings.TrimSpace(input) + + if input != dbName { + fmt.Println("❌ Confirmation failed. Database not reset.") + os.Exit(1) + } + + // Connect to postgres (not the target database) + pgConfig := *connConfig + pgConfig.Database = "postgres" + conn, err := pgx.ConnectConfig(context.Background(), &pgConfig) + if err != nil { + fmt.Printf("❌ Error connecting to PostgreSQL: %v\n", err) + os.Exit(1) + } + defer conn.Close(context.Background()) + + fmt.Println() + fmt.Printf("🔄 Dropping database '%s'...\n", dbName) + _, err = conn.Exec(context.Background(), fmt.Sprintf("DROP DATABASE IF EXISTS %s;", pgx.Identifier{dbName}.Sanitize())) + if err != nil { + fmt.Printf("❌ Error dropping database: %v\n", err) + os.Exit(1) + } + + fmt.Printf("🔄 Creating database '%s'...\n", dbName) + _, err = conn.Exec(context.Background(), fmt.Sprintf("CREATE DATABASE %s;", pgx.Identifier{dbName}.Sanitize())) + if err != nil { + fmt.Printf("❌ Error creating database: %v\n", err) + os.Exit(1) + } + + // Now connect to the new database and initialize + newConn, err := pgx.Connect(context.Background(), *databaseURL) + if err != nil { + fmt.Printf("❌ Error connecting to new database: %v\n", err) + os.Exit(1) + } + defer newConn.Close(context.Background()) + + fmt.Println() + fmt.Printf("📦 Initializing 
database with %d schemas...\n", len(schemas)) + fmt.Println() + + schemasDir := findSchemasDir() + if schemasDir == "" { + fmt.Println("❌ Error: schemas directory not found") + os.Exit(1) + } + + for _, schema := range schemas { + if err := migrateSchema(newConn, schemasDir, schema); err != nil { + fmt.Printf("❌ %s\n Error: %v\n", schema, err) + os.Exit(1) + } + fmt.Printf("✅ %s\n", schema) + } + + fmt.Println() + fmt.Println("============================================================================") + fmt.Println("✅ Database reset and initialization complete!") + fmt.Println("============================================================================") + fmt.Println() +} + +func cmdList() { + fmt.Println("Available schemas:") + fmt.Println() + for i, schema := range schemas { + fmt.Printf(" [%2d] %s\n", i+1, schema) + } + fmt.Println() +} + +func printMenu() { + fmt.Println("============================================================================") + fmt.Println("NodeByte Schema Migration") + fmt.Println("============================================================================") + fmt.Println() + fmt.Println("Available schemas:") + fmt.Println() + + for i, schema := range schemas { + fmt.Printf(" [%d] %s\n", i+1, schema) + } + fmt.Println(" [0] Exit") + fmt.Println() + fmt.Println("Which schema(s) would you like to migrate?") + fmt.Println("Enter schema numbers (comma-separated) or press Enter to migrate all:") +} + +func readSelection() string { + reader := bufio.NewReader(os.Stdin) + fmt.Print("> ") + input, _ := reader.ReadString('\n') + return strings.TrimSpace(input) +} + +func migrateSchema(conn *pgx.Conn, schemasDir, schemaFile string) error { + filePath := filepath.Join(schemasDir, schemaFile) + + if _, err := os.Stat(filePath); err != nil { + return fmt.Errorf("schema file not found: %s", filePath) + } + + sqlBytes, err := os.ReadFile(filePath) + if err != nil { + return err + } + + _, err = conn.Exec(context.Background(), string(sqlBytes)) + 
if err != nil { + return err + } + + return nil +} + +func migrateAll(conn *pgx.Conn, schemasDir string) { + for _, schema := range schemas { + migrateSingleQuiet(conn, schemasDir, schema) + } +} + +func migrateSelected(conn *pgx.Conn, schemasDir, selection string) { + selections := strings.Split(selection, ",") + + for _, sel := range selections { + sel = strings.TrimSpace(sel) + idx, err := strconv.Atoi(sel) + if err != nil || idx < 1 || idx > len(schemas) { + fmt.Printf("⚠️ Invalid selection: %s\n", sel) + continue + } + + schema := schemas[idx-1] + migrateSingleQuiet(conn, schemasDir, schema) + } +} + +func migrateSingleQuiet(conn *pgx.Conn, schemasDir, schema string) { + filePath := filepath.Join(schemasDir, schema) + + if _, err := os.Stat(filePath); err != nil { + fmt.Printf("❌ Schema file not found: %s\n", filePath) + return + } + + fmt.Printf("📦 %s ... ", schema) + + sqlBytes, err := os.ReadFile(filePath) + if err != nil { + fmt.Printf("❌\n Error: %v\n", err) + return + } + + _, err = conn.Exec(context.Background(), string(sqlBytes)) + if err != nil { + fmt.Printf("❌\n Error: %v\n", err) + return + } + + fmt.Println("✅") +} + +func findSchemasDir() string { + // Try different possible locations + possiblePaths := []string{ + "./schemas", // Current directory + "./backend/schemas", // Root directory + "../schemas", // Up one directory + filepath.Join(os.Getenv("PWD"), "schemas"), + } + + for _, path := range possiblePaths { + if info, err := os.Stat(path); err == nil && info.IsDir() { + return path + } + } + + return "" +} diff --git a/cmd/migrate/main.go b/cmd/migrate/main.go new file mode 100644 index 0000000..ae0d8e8 --- /dev/null +++ b/cmd/migrate/main.go @@ -0,0 +1,201 @@ +package main + +import ( + "bufio" + "context" + "flag" + "fmt" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/jackc/pgx/v5" +) + +var schemas = []string{ + "schema_01_users_auth.sql", + "schema_02_pterodactyl_sync.sql", + "schema_03_servers.sql", + 
"schema_04_billing.sql", + "schema_05_support_tickets.sql", + "schema_06_discord_webhooks.sql", + "schema_07_sync_logs.sql", + "schema_08_config.sql", + "schema_09_hytale.sql", + "schema_10_hytale_audit.sql", + "schema_11_hytale_server_logs.sql", + "schema_12_server_subusers.sql", + "schema_13_hytale_server_link.sql", + "schema_14_partners.sql", + "schema_15_careers.sql", +} + +func main() { + databaseURL := flag.String("database", os.Getenv("DATABASE_URL"), "PostgreSQL connection string") + schemaFile := flag.String("schema", "", "Specific schema file to migrate (optional)") + flag.Parse() + + if *databaseURL == "" { + fmt.Println("❌ Error: No database URL provided") + fmt.Println("Usage: migrate -database [-schema ]") + fmt.Println() + fmt.Println("Examples:") + fmt.Println(" migrate -database \"postgresql://user:password@localhost:5432/nodebyte\"") + fmt.Println(" migrate -database \"postgresql://user:password@localhost:5432/nodebyte\" -schema schema_01_users_auth.sql") + fmt.Println() + fmt.Println("Environment variable: DATABASE_URL") + os.Exit(1) + } + + // Get schemas directory + schemasDir := filepath.Join(filepath.Dir(os.Args[0]), "..", "..", "schemas") + if info, err := os.Stat(schemasDir); err != nil || !info.IsDir() { + // Try relative to current working directory + schemasDir = "schemas" + if info, err := os.Stat(schemasDir); err != nil || !info.IsDir() { + fmt.Println("❌ Error: schemas directory not found") + os.Exit(1) + } + } + + // Connect to database + conn, err := pgx.Connect(context.Background(), *databaseURL) + if err != nil { + fmt.Printf("❌ Error connecting to database: %v\n", err) + os.Exit(1) + } + defer conn.Close(context.Background()) + + fmt.Println("✅ Connected to database") + + if *schemaFile != "" { + // Migrate specific schema + if err := migrateSchema(conn, schemasDir, *schemaFile); err != nil { + fmt.Printf("❌ Failed to migrate: %s\n", *schemaFile) + fmt.Printf(" Error: %v\n", err) + os.Exit(1) + } + fmt.Printf("✅ Successfully 
migrated: %s\n", *schemaFile) + } else { + // Interactive menu + printMenu() + selection := readSelection() + + if selection == "0" { + fmt.Println("Exiting...") + return + } + + if selection == "" { + // Migrate all + fmt.Println("Migrating all schemas...") + fmt.Println() + migrateAll(conn, schemasDir) + } else { + // Migrate selected + migrateSelected(conn, schemasDir, selection) + } + + fmt.Println() + fmt.Println("============================================================================") + fmt.Println("✅ Migration complete") + fmt.Println("============================================================================") + fmt.Println() + } +} + +func printMenu() { + fmt.Println("============================================================================") + fmt.Println("NodeByte Schema Migration") + fmt.Println("============================================================================") + fmt.Println() + fmt.Println("Available schemas:") + fmt.Println() + + for i, schema := range schemas { + fmt.Printf(" [%d] %s\n", i+1, schema) + } + fmt.Println(" [0] Exit") + fmt.Println() + fmt.Println("Which schema(s) would you like to migrate?") + fmt.Println("Enter schema numbers (comma-separated) or press Enter to migrate all:") +} + +func readSelection() string { + reader := bufio.NewReader(os.Stdin) + fmt.Print("> ") + input, _ := reader.ReadString('\n') + return strings.TrimSpace(input) +} + +func migrateSchema(conn *pgx.Conn, schemasDir, schemaFile string) error { + filePath := filepath.Join(schemasDir, schemaFile) + + if _, err := os.Stat(filePath); err != nil { + return fmt.Errorf("schema file not found: %s", filePath) + } + + fmt.Printf("📦 Migrating: %s\n", schemaFile) + fmt.Println() + + sqlBytes, err := os.ReadFile(filePath) + if err != nil { + return err + } + + _, err = conn.Exec(context.Background(), string(sqlBytes)) + if err != nil { + return err + } + + return nil +} + +func migrateAll(conn *pgx.Conn, schemasDir string) { + for _, schema := range 
schemas { + migrateSingleQuiet(conn, schemasDir, schema) + } +} + +func migrateSelected(conn *pgx.Conn, schemasDir, selection string) { + selections := strings.Split(selection, ",") + + for _, sel := range selections { + sel = strings.TrimSpace(sel) + idx, err := strconv.Atoi(sel) + if err != nil || idx < 1 || idx > len(schemas) { + fmt.Printf("⚠️ Invalid selection: %s\n", sel) + continue + } + + schema := schemas[idx-1] + migrateSingleQuiet(conn, schemasDir, schema) + } +} + +func migrateSingleQuiet(conn *pgx.Conn, schemasDir, schema string) { + filePath := filepath.Join(schemasDir, schema) + + if _, err := os.Stat(filePath); err != nil { + fmt.Printf("❌ Schema file not found: %s\n", filePath) + return + } + + fmt.Printf("📦 %s ... ", schema) + + sqlBytes, err := os.ReadFile(filePath) + if err != nil { + fmt.Printf("❌\n Error: %v\n", err) + return + } + + _, err = conn.Exec(context.Background(), string(sqlBytes)) + if err != nil { + fmt.Printf("❌\n Error: %v\n", err) + fmt.Println(" Run manually to see full error details") + return + } + + fmt.Println("✅") +} diff --git a/docs/HYTALE_API.md b/docs/HYTALE_API.md deleted file mode 100644 index 2e7c4ff..0000000 --- a/docs/HYTALE_API.md +++ /dev/null @@ -1,649 +0,0 @@ -## Overview - -This API provides complete OAuth 2.0 Device Code Flow (RFC 8628) authentication and game session management for Hytale servers. It handles token lifecycle management, automatic refresh, and session tracking. - -## Authentication Flow - -``` -┌─────────────────────────────────────────────────────────────┐ -│ 1. Request Device Code → Get user_code & verification_uri │ -├─────────────────────────────────────────────────────────────┤ -│ 2. User enters code at verification_uri on web browser │ -├─────────────────────────────────────────────────────────────┤ -│ 3. Poll token endpoint → Receive access_token & refresh_token -├─────────────────────────────────────────────────────────────┤ -│ 4. 
Get user profiles using access_token │ -├─────────────────────────────────────────────────────────────┤ -│ 5. Select profile → Bind to game session │ -├─────────────────────────────────────────────────────────────┤ -│ 6. Create game session → Receive session tokens │ -├─────────────────────────────────────────────────────────────┤ -│ (Automatic) Refresh tokens 5 min before expiry │ -└─────────────────────────────────────────────────────────────┘ -``` - -## Endpoints - -### 1. Request Device Code - -**Endpoint:** `POST /api/v1/hytale/oauth/device-code` - -Initiates OAuth device code flow. Returns a device code and verification URI for user browser authentication. - -**Rate Limit:** 5 requests per 15 minutes (per IP address) - -**Request Body:** -```json -{} -``` - -**Success Response (200):** -```json -{ - "device_code": "DE123456789ABCDEF", - "user_code": "AB12-CD34", - "verification_uri": "https://accounts.hytale.com/device", - "expires_in": 1800, - "interval": 5 -} -``` - -**Error Responses:** - -- **400 Bad Request** - Invalid request format - ```json - { - "code": "INVALID_REQUEST", - "message": "Device code request failed", - "status": 400 - } - ``` - -- **429 Too Many Requests** - Rate limit exceeded - ```json - { - "code": "RATE_LIMITED", - "message": "Too many requests. Please try again later.", - "status": 429, - "headers": { - "X-RateLimit-Limit": "5", - "X-RateLimit-Remaining": "0", - "X-RateLimit-Reset": "1705270800" - } - } - ``` - -**Flow Instructions:** -1. Send request to get device code -2. Display `user_code` to user (format: XX00-XX00) -3. Instruct user to visit `verification_uri` and enter the code -4. Proceed to polling (endpoint #2) - ---- - -### 2. Poll for Token - -**Endpoint:** `POST /api/v1/hytale/oauth/token` - -Polls Hytale OAuth server for authorization completion. Returns tokens once user authorizes. 
- -**Rate Limit:** 10 requests per 5 minutes (per account ID) - -**Request Body:** -```json -{ - "device_code": "DE123456789ABCDEF" -} -``` - -**Success Response (200):** -```json -{ - "access_token": "eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "refresh_token": "refresh_eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "expires_in": 3600, - "token_type": "Bearer", - "account_id": "550e8400-e29b-41d4-a716-446655440000" -} -``` - -**Error Responses:** - -- **400 Bad Request** - Invalid device code or missing fields - ```json - { - "code": "INVALID_DEVICE_CODE", - "message": "Device code not found or expired", - "status": 400 - } - ``` - -- **401 Unauthorized** - Device code still pending user authorization - ```json - { - "code": "AUTHORIZATION_PENDING", - "message": "Awaiting user authorization. Please try again in 5 seconds.", - "status": 401, - "retry_after": 5 - } - ``` - -- **403 Forbidden** - Session limit reached (no premium entitlement) - ```json - { - "code": "SESSION_LIMIT_EXCEEDED", - "message": "Account has reached concurrent session limit (100). Upgrade to unlimited_servers to remove this restriction.", - "status": 403, - "entitlement_required": "sessions.unlimited_servers" - } - ``` - -- **404 Not Found** - Device code expired or invalid - ```json - { - "code": "SESSION_NOT_FOUND", - "message": "Device code expired (30 min limit)", - "status": 404 - } - ``` - -**Polling Strategy:** -- Implement exponential backoff: start with 5s, increase by 1s each attempt (max 15s) -- Stop polling after 15 minutes (timeout) -- Handle 401 responses by retrying after `retry_after` seconds -- On 403 SESSION_LIMIT, inform user they need premium entitlement - ---- - -### 3. Refresh Access Token - -**Endpoint:** `POST /api/v1/hytale/oauth/refresh` - -Refreshes an expired or expiring access token using the refresh token (valid for 30 days). 
- -**Rate Limit:** 6 requests per 1 hour (per account ID) - -**Request Body:** -```json -{ - "refresh_token": "refresh_eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..." -} -``` - -**Success Response (200):** -```json -{ - "access_token": "eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "refresh_token": "refresh_eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "expires_in": 3600, - "token_type": "Bearer" -} -``` - -**Error Responses:** - -- **401 Unauthorized** - Invalid or expired refresh token - ```json - { - "code": "UNAUTHORIZED", - "message": "Refresh token invalid or expired. Re-authenticate required.", - "status": 401 - } - ``` - -- **429 Too Many Requests** - Exceeded 6 refreshes per hour - ```json - { - "code": "RATE_LIMITED", - "message": "Token refresh limit exceeded", - "status": 429 - } - ``` - -**Notes:** -- This endpoint is automatically called by backend (every 5 minutes for OAuth tokens, every 10 minutes for game sessions) -- New refresh_token received on each successful refresh (old token invalidated) -- Keep refresh tokens secure - they grant 30-day access - ---- - -### 4. Get User Profiles - -**Endpoint:** `POST /api/v1/hytale/oauth/profiles` - -Retrieves all game profiles associated with the authenticated account. 
- -**Rate Limit:** 20 requests per 1 hour (per account ID) - -**Headers:** -``` -Authorization: Bearer -``` - -**Request Body:** -```json -{} -``` - -**Success Response (200):** -```json -{ - "account_id": "550e8400-e29b-41d4-a716-446655440000", - "profiles": [ - { - "uuid": "f47ac10b-58cc-4372-a567-0e02b2c3d479", - "username": "PlayerName", - "created_at": "2025-01-01T00:00:00Z" - }, - { - "uuid": "550e8400-e29b-41d4-a716-446655440001", - "username": "AltCharacter", - "created_at": "2025-06-15T12:30:00Z" - } - ] -} -``` - -**Error Responses:** - -- **401 Unauthorized** - Missing or invalid access token - ```json - { - "code": "UNAUTHORIZED", - "message": "Invalid or expired access token", - "status": 401 - } - ``` - -- **403 Forbidden** - Token is valid but lacks required scope - ```json - { - "code": "FORBIDDEN", - "message": "Token missing required scope: openid", - "status": 403 - } - ``` - -**Usage:** -- Call after successful token polling -- Display profiles to user for selection -- Proceed to endpoint #5 (select profile) - ---- - -### 5. Select Profile - -**Endpoint:** `POST /api/v1/hytale/oauth/select-profile` - -Binds a profile to the current session. Required before creating game session. 
- -**Rate Limit:** 20 requests per 1 hour (per account ID) - -**Headers:** -``` -Authorization: Bearer -``` - -**Request Body:** -```json -{ - "profile_uuid": "f47ac10b-58cc-4372-a567-0e02b2c3d479" -} -``` - -**Success Response (200):** -```json -{ - "account_id": "550e8400-e29b-41d4-a716-446655440000", - "profile_id": "f47ac10b-58cc-4372-a567-0e02b2c3d479", - "username": "PlayerName", - "selected_at": "2025-01-14T10:30:00Z" -} -``` - -**Error Responses:** - -- **400 Bad Request** - Invalid profile UUID format - ```json - { - "code": "INVALID_REQUEST", - "message": "profile_uuid must be a valid UUID", - "status": 400 - } - ``` - -- **401 Unauthorized** - Invalid access token - ```json - { - "code": "UNAUTHORIZED", - "message": "Invalid or expired access token", - "status": 401 - } - ``` - -- **404 Not Found** - Profile UUID doesn't belong to this account - ```json - { - "code": "SESSION_NOT_FOUND", - "message": "Profile not found for this account", - "status": 404 - } - ``` - ---- - -### 6. Create Game Session - -**Endpoint:** `POST /api/v1/hytale/oauth/game-session/new` - -Creates a new game session with session tokens. Valid for 1 hour from creation. - -**Rate Limit:** 20 requests per 1 hour (per account ID) - -**Headers:** -``` -Authorization: Bearer -``` - -**Request Body:** -```json -{ - "profile_uuid": "f47ac10b-58cc-4372-a567-0e02b2c3d479" -} -``` - -**Success Response (200):** -```json -{ - "session_id": "550e8400-e29b-41d4-a716-446655440002", - "account_id": "550e8400-e29b-41d4-a716-446655440000", - "profile_id": "f47ac10b-58cc-4372-a567-0e02b2c3d479", - "session_token": "eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "identity_token": "eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "expires_at": "2025-01-14T11:30:00Z", - "created_at": "2025-01-14T10:30:00Z" -} -``` - -**Token Contents:** -- `session_token`: Contains session_id, profile_id, expiry. Used to verify player identity on server. -- `identity_token`: Contains player email, username, and account info. 
Use for profile display. - -**Error Responses:** - -- **401 Unauthorized** - Invalid access token - ```json - { - "code": "UNAUTHORIZED", - "message": "Invalid or expired access token", - "status": 401 - } - ``` - -- **403 Forbidden** - Session limit exceeded - ```json - { - "code": "SESSION_LIMIT_EXCEEDED", - "message": "Account has reached concurrent session limit (100). Upgrade to unlimited_servers.", - "status": 403 - } - ``` - -- **404 Not Found** - Profile doesn't exist - ```json - { - "code": "SESSION_NOT_FOUND", - "message": "Profile not found", - "status": 404 - } - ``` - -**Notes:** -- Session tokens automatically refreshed every 10 minutes by backend -- Player can use `session_token` to authenticate with Hytale game servers -- Keep session tokens secret (equivalent to passwords) - ---- - -### 7. Refresh Game Session - -**Endpoint:** `POST /api/v1/hytale/oauth/game-session/refresh` - -Refreshes an active game session to extend its 1-hour lifetime. Only works within last 10 minutes before expiry. 
- -**Rate Limit:** 20 requests per 1 hour (per account ID) - -**Headers:** -``` -Authorization: Bearer -``` - -**Request Body:** -```json -{ - "session_id": "550e8400-e29b-41d4-a716-446655440002" -} -``` - -**Success Response (200):** -```json -{ - "session_id": "550e8400-e29b-41d4-a716-446655440002", - "session_token": "eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "identity_token": "eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9...", - "expires_at": "2025-01-14T12:30:00Z", - "refreshed_at": "2025-01-14T10:40:00Z" -} -``` - -**Error Responses:** - -- **400 Bad Request** - Session not yet eligible for refresh - ```json - { - "code": "INVALID_REQUEST", - "message": "Session cannot be refreshed until 10 minutes before expiry", - "status": 400 - } - ``` - -- **401 Unauthorized** - Invalid access token - ```json - { - "code": "UNAUTHORIZED", - "message": "Invalid or expired access token", - "status": 401 - } - ``` - -- **404 Not Found** - Session doesn't exist or is expired - ```json - { - "code": "SESSION_NOT_FOUND", - "message": "Session not found or expired", - "status": 404 - } - ``` - -**Notes:** -- Backend automatically calls this every 10 minutes -- Calling manually provides same result as automatic refresh -- Receiving new session_token invalidates previous token - ---- - -### 8. Terminate Game Session - -**Endpoint:** `POST /api/v1/hytale/oauth/game-session/delete` - -Terminates an active game session. Call on server shutdown or logout. 
- -**Rate Limit:** 20 requests per 1 hour (per account ID) - -**Headers:** -``` -Authorization: Bearer -``` - -**Request Body:** -```json -{ - "session_id": "550e8400-e29b-41d4-a716-446655440002" -} -``` - -**Success Response (200):** -```json -{ - "session_id": "550e8400-e29b-41d4-a716-446655440002", - "terminated_at": "2025-01-14T10:45:00Z", - "status": "deleted" -} -``` - -**Error Responses:** - -- **404 Not Found** - Session doesn't exist - ```json - { - "code": "SESSION_NOT_FOUND", - "message": "Session not found", - "status": 404 - } - ``` - -**Notes:** -- Sessions auto-expire after 1 hour if not refreshed -- Calling this endpoint forcibly terminates the session immediately -- Use when player disconnects from server - ---- - -## Token Validation - -All `session_token` and `identity_token` are JWT (JSON Web Tokens) signed with Ed25519 keys. - -### Signature Verification - -1. Fetch JWKS (public keys) from Hytale endpoint (cached hourly) -2. Extract JWT header to find `kid` (key ID) -3. Retrieve public key by `kid` from JWKS -4. Verify signature using Ed25519 verification -5. 
Decode payload and validate claims - -### Required Claims - -**session_token:** -- `sub` - Profile UUID -- `aud` - Should contain "sessions" -- `exp` - Expiration timestamp (Unix) -- `iat` - Issued-at timestamp (Unix) -- `session_id` - Custom claim with session identifier - -**identity_token:** -- `sub` - Account UUID -- `aud` - Should contain "identities" -- `exp` - Expiration timestamp (Unix) -- `email` - User email address -- `preferred_username` - Display name - -### Validation Example (Go) - -```go -import "github.com/nodebyte/backend/internal/hytale" - -validator := hytale.NewTokenValidator(jwksCache) - -// Validate session token -sessionClaims, err := validator.ValidateSessionToken(sessionTokenString) -if err != nil { - // Handle invalid token - log.Printf("Invalid session token: %v", err) - return -} - -profileID := sessionClaims.Sub // Use as player identifier -``` - ---- - -## Error Codes Reference - -| Code | HTTP | Description | Retry? | -|------|------|-------------|--------| -| INVALID_REQUEST | 400 | Invalid request format/parameters | No | -| INVALID_DEVICE_CODE | 400 | Device code invalid or expired | No - get new code | -| UNAUTHORIZED | 401 | Missing/invalid access token | Yes - refresh token | -| AUTHORIZATION_PENDING | 401 | User hasn't authorized yet | Yes - follow retry_after | -| FORBIDDEN | 403 | Valid token but insufficient permissions | No | -| SESSION_LIMIT_EXCEEDED | 403 | 100 concurrent session limit | No - user needs upgrade | -| SESSION_NOT_FOUND | 404 | Session/profile/code doesn't exist | No | -| ENDPOINT_NOT_FOUND | 404 | Invalid endpoint URL | No | -| RATE_LIMITED | 429 | Too many requests in time window | Yes - after X-RateLimit-Reset | -| SERVICE_ERROR | 500+ | Internal server error | Yes - with exponential backoff | - ---- - -## Rate Limiting - -All endpoints return rate limit headers: - -``` -X-RateLimit-Limit: 5 -X-RateLimit-Remaining: 3 -X-RateLimit-Reset: 1705270800 -``` - -- `X-RateLimit-Limit` - Maximum requests in 
window -- `X-RateLimit-Remaining` - Requests left -- `X-RateLimit-Reset` - Unix timestamp when limit resets - -**Per-Endpoint Limits:** -- Device Code: 5/15min (per IP) -- Token Poll: 10/5min (per account) -- Profiles: 20/hour (per account) -- Game Session: 20/hour (per account) - ---- - -## Audit Logging - -All OAuth operations are logged for compliance: - -- Token creation (timestamp, account, IP, user agent) -- Token refresh (when, which token, success/failure) -- Session operations (created/refreshed/deleted, when) -- Auth failures (reason, IP, timestamp) - -Logs accessible via admin dashboard for forensics and compliance investigations. - ---- - -## Best Practices - -1. **Token Storage** - - Store tokens in secure, encrypted database - - Never log tokens in plaintext - - Use HTTPS for all API calls - -2. **Refresh Strategy** - - Refresh tokens 5-10 minutes before expiry - - Implement exponential backoff on refresh failures - - Handle 401 by requesting new device code - -3. **Session Management** - - Create new session on each server login - - Terminate session on logout/server shutdown - - Handle 403 SESSION_LIMIT gracefully (show upgrade prompt) - -4. **Error Handling** - - Implement retry logic with backoff for 401, 429, 5xx - - Never retry on 400, 403, 404 - - Display user-friendly error messages (not technical details) - -5. **Security** - - Validate JWT signatures before trusting token claims - - Use HTTPS (never HTTP) - - Implement CSRF tokens if storing access tokens in cookies - - Never expose tokens in URLs or logs - diff --git a/docs/HYTALE_AUTH_FLOW.md b/docs/HYTALE_AUTH_FLOW.md deleted file mode 100644 index 78d3ff4..0000000 --- a/docs/HYTALE_AUTH_FLOW.md +++ /dev/null @@ -1,544 +0,0 @@ -# Hytale Authentication for Server Hosting - -**Welcome!** This guide explains how Hytale authentication works on NodeByte-hosted servers and how to set up your gaming environment. 
- -## Overview - -When you rent a Hytale server from NodeByte, we use secure OAuth 2.0 authentication (the same standard used by Google, Microsoft, and Apple). This means: - -- ✅ Your Hytale account stays secure -- ✅ No need to share passwords with us -- ✅ Easy multi-device access -- ✅ Automatic token refresh (you stay logged in) - -## How It Works (Simple Version) - -``` -1. You authorize NodeByte once via web browser - ↓ -2. We get permission to access your Hytale profile - ↓ -3. You select which character to play on the server - ↓ -4. You get a session token to join the game - ↓ -5. Session automatically stays fresh (no re-login needed) -``` - -**No credentials are ever shared.** Hytale verifies your identity directly. - ---- - -## Authentication Flows - -### Flow 1: Interactive Servers (Web Browser) - -**Use case:** Setting up a new server, managing multiple accounts - -**Step-by-step:** - -#### 1️⃣ Go to Your Server Dashboard -Visit your NodeByte control panel and navigate to your server settings. - -#### 2️⃣ Click "Connect Hytale Account" -You'll see a blue button to authorize access. - -#### 3️⃣ You'll See a Device Code -``` -📱 Device Code: AB12-CD34 -Please visit: https://accounts.hytale.com/device -and enter the code above -``` - -#### 4️⃣ Visit the Link in Your Browser -Open `https://accounts.hytale.com/device` in your browser (on any device). - -**Note:** You can do this on your phone while your PC is running the server! - -#### 5️⃣ Sign In to Hytale (If Not Already) -Enter your Hytale username/password if prompted. - -#### 6️⃣ Enter the Device Code -Paste `AB12-CD34` into the code field. - -#### 7️⃣ Click "Authorize" -Review permissions: -- ✅ Access your game profiles -- ✅ Create gaming sessions -- ✅ Check your account details - -Click the green "Authorize" button. - -#### 8️⃣ Done! ✅ -Your server is now connected to your Hytale account. Return to your dashboard. 
- ---- - -### Flow 2: Desktop Applications & Launchers - -**Use case:** Dedicated servers, Linux/Mac hosting, automated provisioning - -If using a desktop client or server launcher, it might use PKCE (Proof Key for Code Exchange) flow: - -#### 1️⃣ Launch Your Game/Server Manager -Open the application that needs Hytale access. - -#### 2️⃣ Click "Login with Hytale" -The app will open your browser automatically. - -#### 3️⃣ Sign In & Authorize -Same as Flow 1 (device code flow), or the app may use a password flow instead. - -#### 4️⃣ Redirected Back to App -Once authorized, you're automatically logged in. - ---- - -### Flow 3: Automated/Headless Servers - -**Use case:** Dedicated Linux servers, 24/7 game servers, Cloud VPS - -For servers without a display/browser: - -#### 1️⃣ Get Initial Token (Device Code) -```bash -# Admin runs this command -curl -X POST http://your-server:3000/api/v1/hytale/oauth/device-code - -# Output: -# { -# "user_code": "AB12-CD34", -# "verification_uri": "https://accounts.hytale.com/device" -# } -``` - -#### 2️⃣ Authorize from Any Browser -Visit the `verification_uri` from any device and enter the code. - -#### 3️⃣ Server Gets Token Automatically -Once authorized, the server receives your access token. - -#### 4️⃣ Admin Stores Token Securely -```bash -# Server stores in encrypted config -HYTALE_REFRESH_TOKEN="refresh_eyJhbGc..." - -# Token automatically refreshes every 5-10 minutes -# No further action needed -``` - ---- - -## Token Lifecycle - -### Initial Token (Valid for 1 Hour) - -When you first authorize: -``` -Time: 0:00 You authorize - ↓ - 0:00 Server gets access token (1 hour expiry) - ↓ - 0:50 Server automatically refreshes token (before expiry) - ↓ - 1:00 Token expired (but we refreshed already!) - ↓ - 1:40 Another refresh happens - ↓ - Continues forever... you never have to re-auth! 
-``` - -### Refresh Token (Valid for 30 Days) - -The "refresh token" lets us extend your access: -- ✅ Stored securely on our servers -- ✅ Valid for 30 days -- ✅ Only used to get new access tokens -- ✅ Automatically rotated (new one each refresh) - -**Important:** If you don't log in for 30+ days, you'll need to re-authorize. - ---- - -## Join Your Server - -### Step 1: Launch Your Game - -Open the Hytale launcher (or your server's game client). - -### Step 2: Select Server - -From the server list, find your NodeByte-hosted server. - -### Step 3: Choose Your Character - -You'll see a list of all your Hytale characters: -``` -Select a character: -○ MyMainCharacter -○ MySecondCharacter -○ MyPvPCharacter -``` - -Pick the one you want to play with. - -### Step 4: Join Game 🎮 - -Click "Connect" and you'll instantly join the server! - -**Behind the scenes:** -1. Your character is verified against your Hytale account ✅ -2. Session tokens are generated (valid for 1 hour) -3. Session automatically refreshes every 10 minutes -4. You stay connected as long as you're playing - ---- - -## Multi-Account / Multi-Device Support - -### Using Multiple Characters - -You can connect any of your Hytale characters to the same server: - -1. Open server settings → "Authorize Profile" -2. Select different character from the list -3. Each character keeps their own game progress - -### Playing from Different Devices - -Your authorization works across devices: - -**Device 1 (Desktop):** Your main gaming PC -**Device 2 (Laptop):** While traveling -**Device 3 (Streaming):** Broadcasting on Twitch - -Each device can use the same account without re-authorizing! - -**How?** The refresh token works anywhere. Your session tokens are created per-device but share the same account credentials. - ---- - -## Troubleshooting - -### "Authorization Pending" (Page Won't Update) - -**Problem:** You entered the code but the page keeps saying "waiting for authorization" - -**Solution:** -1. 
Check you're on the correct device (where you entered the code) -2. Wait 5-10 seconds - it polls automatically -3. Refresh the original page manually if it's taking too long -4. If it times out after 15 minutes, get a new device code and try again - ---- - -### "Session Limit Exceeded" (403 Error) - -**Problem:** You see "This account has reached its concurrent session limit (100 sessions)" - -**This means:** -- Your Hytale account has too many active game sessions -- Default limit is 100 concurrent sessions -- Only applies to accounts without premium entitlement - -**Solution:** -1. **Terminate old sessions:** - - Log out from other servers - - Close other game windows - - Wait 1 hour for sessions to auto-expire - -2. **Upgrade your account:** - - Purchase the `sessions.unlimited_servers` entitlement - - After upgrade, no session limit applies - - Contact Hytale support for details - ---- - -### "Invalid or Expired Token" (401 Error) - -**Problem:** You see "Unauthorized - your token expired or is invalid" - -**Possible causes:** - -1. **30-day refresh token expired** - - You haven't logged in for 30+ days - - Solution: Re-authorize once (start with device code flow) - -2. **Your password changed** - - You changed your Hytale password - - Old tokens automatically invalidated (security feature) - - Solution: Re-authorize - -3. **Account compromised** - - Someone else may have accessed your account - - All tokens automatically revoked for safety - - Solution: Change password at accounts.hytale.com, then re-authorize here - -**Re-authorize in 2 steps:** -1. Go to server settings → "Reconnect Hytale Account" -2. Follow the device code flow (same as initial setup) - ---- - -### "Profile Not Found" (404 Error) - -**Problem:** "We can't find that character" - -**Possible causes:** - -1. **Character was deleted** - - Solution: Create a new character and select it - -2. 
**Using wrong account** - - You have multiple Hytale accounts - - Make sure you're signing in with the right one - - Solution: Sign in with different account at verification_uri - -3. **Rare account sync issue** - - Wait a few minutes and try again - - Contact NodeByte support if it persists - ---- - -### "The Server Isn't Responding" - -**Problem:** "Can't reach the game server" or "Connection refused" - -**This is likely a server issue, not auth. Check:** - -1. ✅ Is the server running? - - Visit your NodeByte control panel - - Check server status (should be green) - - Click "Start Server" if it's offline - -2. ✅ Is the server publicly accessible? - - Check firewall rules - - Verify port 25565 (or custom port) is open - - Check network connectivity - -3. ✅ Are you on the right server? - - Verify the server IP/name - - Check you're not behind a restrictive firewall/VPN - -**If you're still stuck:** Contact NodeByte support with your server ID. - ---- - -### "Rate Limited" (429 Error) - -**Problem:** "Too many requests - please slow down" - -**This means:** -- You're polling for tokens too frequently -- Default limits: 10 token polls per 5 minutes -- 6 token refreshes per hour - -**Solution:** -- Wait a few minutes -- Retry after the `X-RateLimit-Reset` time shown in error -- If using automation, implement exponential backoff - -**Don't worry:** Normal gameplay never hits these limits. 
Only happens with: -- Rapid script/API calls -- Aggressive automated testing - ---- - -## Security & Privacy - -### What NodeByte Stores - -When you authorize, we store (encrypted): - -✅ **Your access token** - Used to create game sessions -✅ **Refresh token** - Used to keep access valid for 30 days -✅ **Account ID** - Links to your Hytale account -✅ **Session history** - For compliance and debugging - -### What We Don't Store - -❌ Your Hytale password (we never see it) -❌ Your email address (Hytale keeps that) -❌ Your character location/progress (that's in-game data) - -### What Hytale Stores - -Hytale (the game studio) stores: -- Your character list -- Game progress -- Playtime statistics -- Server access logs - -This is standard for any online game. - -### How Tokens Are Protected - -1. **In Transit:** HTTPS encryption (locked padlock in browser) -2. **At Rest:** AES-256 encryption (military-grade) -3. **Access Control:** Only backend services can decrypt -4. **Audit Logging:** Every token operation is logged - -### Revoking Access - -Want to disconnect your Hytale account? - -1. Go to server settings -2. Click "Disconnect Hytale Account" -3. All tokens for this server are immediately deleted -4. You'll need to re-authorize to play again - -**Note:** This only affects this specific server. Your account on other servers continues to work. - ---- - -## Advanced: Manual Token Management - -### For Server Admins / Power Users - -If you're self-hosting or need manual control: - -#### Get a New Token - -```bash -# 1. Request device code -curl -X POST http://your-server:3000/api/v1/hytale/oauth/device-code - -# Response: -# { -# "device_code": "DE1234567890ABCDEF", -# "user_code": "XY12-AB34", -# "verification_uri": "https://accounts.hytale.com/device", -# "expires_in": 1800 -# } -``` - -#### Authorize - -Visit the `verification_uri` and enter the device code. 
- -#### Retrieve Tokens - -```bash -# After authorizing -curl -X POST http://your-server:3000/api/v1/hytale/oauth/token \ - -H "Content-Type: application/json" \ - -d '{"device_code": "DE1234567890ABCDEF"}' - -# Response: -# { -# "access_token": "eyJhbGc...", -# "refresh_token": "refresh_eyJhbGc...", -# "expires_in": 3600 -# } -``` - -#### Refresh Token Manually - -```bash -curl -X POST http://your-server:3000/api/v1/hytale/oauth/refresh \ - -H "Content-Type: application/json" \ - -d '{"refresh_token": "refresh_eyJhbGc..."}' -``` - -#### Create Game Session - -```bash -curl -X POST http://your-server:3000/api/v1/hytale/oauth/game-session/new \ - -H "Authorization: Bearer " \ - -H "Content-Type: application/json" \ - -d '{"profile_uuid": "f47ac10b-58cc-4372-a567-0e02b2c3d479"}' -``` - -**These endpoints require API key for security. Contact your server admin.** - ---- - -## Glossary - -| Term | Explanation | -|------|-------------| -| **OAuth** | Secure authentication standard (used by Google, Microsoft, Apple) | -| **Device Code** | A temporary code (XX00-XX00) you enter to authorize | -| **Access Token** | A password-like credential that's valid for 1 hour | -| **Refresh Token** | A longer-lived credential (30 days) used to get new access tokens | -| **Session Token** | A game session identifier valid for 1 hour, auto-refreshed | -| **Entitlement** | A special account feature/permission (e.g., unlimited_servers) | - ---- - -## Getting Help - -### Common Issues & Support - -**Issue:** Something isn't working -**Solution:** -1. Check this guide first (especially troubleshooting section) -2. Restart your game client -3. Restart your server -4. Clear browser cookies/cache - -**Still stuck?** Contact NodeByte support: -- **Email:** support@nodebyte.com -- **Discord:** https://discord.gg/nodebyte -- **Web:** https://nodebyte.com/support - -**Include in your support request:** -- Your server ID -- The error message (screenshot helps!) 
-- When the issue started -- What you were doing when it happened - -### Report a Security Issue - -If you find a vulnerability: -- **DO NOT** post publicly -- Email: security@nodebyte.com -- We'll respond within 24 hours - ---- - -## FAQ - -**Q: Do I need to re-authorize every time I play?** -A: No! Authorization works forever (until tokens expire after 30 days of inactivity, then you do it once more). - -**Q: Can my friend play on my server with their account?** -A: Yes! Each friend authorizes once, then can play any time. Each account is separate. - -**Q: What if I lose access to my Hytale account?** -A: Change your password at accounts.hytale.com, then re-authorize here. Old tokens are automatically revoked for security. - -**Q: Why do you need permission to access my profile?** -A: To verify which character you're playing and link it to your game progress on the server. - -**Q: Is my password ever stored?** -A: No, never. You sign in directly with Hytale. We only get tokens, never passwords. - -**Q: Can I see my authorization history?** -A: Yes! Server admins can view audit logs in the control panel (shows all auth events, timestamps, IPs). - -**Q: What happens when the game server shuts down?** -A: Tokens remain valid for 30 days. When server restarts, it automatically refreshes tokens. - -**Q: Can I use the same token on multiple servers?** -A: Yes, but each server manages its own tokens. For safety, each server gets separate tokens. - ---- - -## Next Steps - -1. ✅ **Authorize your account** using the device code flow -2. ✅ **Select your character** from the profile list -3. ✅ **Launch the game** and start playing! -4. ✅ **Your session will auto-refresh** - no action needed - -**Questions?** See the troubleshooting section or contact support. 
- -**Happy gaming!** 🎮 - ---- - -**Version:** 1.0.0 -**Last Updated:** January 14, 2026 -**Platform:** NodeByte Hytale Hosting - diff --git a/docs/HYTALE_DOWNLOADER_INTEGRATION.md b/docs/HYTALE_DOWNLOADER_INTEGRATION.md deleted file mode 100644 index f573d7f..0000000 --- a/docs/HYTALE_DOWNLOADER_INTEGRATION.md +++ /dev/null @@ -1,630 +0,0 @@ -## Overview - -The Hytale Downloader CLI automates downloading and updating Hytale server files. It integrates with NodeByte's OAuth authentication to provide seamless, secure server provisioning in CI/CD pipelines. - -**Download:** https://downloader.hytale.com/hytale-downloader.zip - -## Integration Points - -``` -┌──────────────────────────┐ -│ Provisioning Pipeline │ -│ (Terraform/Ansible) │ -└────────────┬─────────────┘ - │ - ├─→ 1. Request OAuth token from NodeByte API - │ (device code flow or refresh token) - │ - ├─→ 2. Pass token to Downloader CLI - │ (via environment variable or CLI flag) - │ - ├─→ 3. CLI authenticates with Hytale - │ (using token) - │ - └─→ 4. 
Downloads latest server version - Extracts and validates files -``` - -## Prerequisites - -- NodeByte backend API running (OAuth endpoints accessible) -- OAuth account with valid refresh token (30-day lifetime) -- Hytale Downloader CLI installed -- Bash/PowerShell for automation scripts -- curl or similar for API calls - -## Step 1: Obtain OAuth Tokens - -### Option A: Device Code Flow (First-Time Setup) - -For initial setup on a new server/CI environment: - -**Bash Example:** -```bash -#!/bin/bash - -# Request device code -DEVICE_RESPONSE=$(curl -s -X POST http://localhost:3000/api/v1/hytale/oauth/device-code) - -DEVICE_CODE=$(echo "$DEVICE_RESPONSE" | jq -r '.device_code') -USER_CODE=$(echo "$DEVICE_RESPONSE" | jq -r '.user_code') -VERIFICATION_URI=$(echo "$DEVICE_RESPONSE" | jq -r '.verification_uri') - -echo "🔐 Authorize at: $VERIFICATION_URI" -echo "📝 Enter code: $USER_CODE" -echo "" - -# Poll for token (with timeout) -TIMEOUT=900 # 15 minutes -INTERVAL=5 -ELAPSED=0 - -while [ $ELAPSED -lt $TIMEOUT ]; do - TOKEN_RESPONSE=$(curl -s -X POST http://localhost:3000/api/v1/hytale/oauth/token \ - -H "Content-Type: application/json" \ - -d "{\"device_code\": \"$DEVICE_CODE\"}") - - # Check if we got access_token - if echo "$TOKEN_RESPONSE" | jq -e '.access_token' > /dev/null 2>&1; then - ACCESS_TOKEN=$(echo "$TOKEN_RESPONSE" | jq -r '.access_token') - REFRESH_TOKEN=$(echo "$TOKEN_RESPONSE" | jq -r '.refresh_token') - - echo "✅ Authorization successful!" 
- echo "ACCESS_TOKEN=$ACCESS_TOKEN" >> .env.local - echo "REFRESH_TOKEN=$REFRESH_TOKEN" >> .env.local - break - fi - - sleep $INTERVAL - ELAPSED=$((ELAPSED + INTERVAL)) -done - -if [ $ELAPSED -ge $TIMEOUT ]; then - echo "❌ Authorization timeout (15 min exceeded)" - exit 1 -fi -``` - -**PowerShell Example:** -```powershell -# Request device code -$response = Invoke-RestMethod -Uri "http://localhost:3000/api/v1/hytale/oauth/device-code" ` - -Method Post -Headers @{"Content-Type" = "application/json"} - -$deviceCode = $response.device_code -$userCode = $response.user_code -$verificationUri = $response.verification_uri - -Write-Host "🔐 Authorize at: $verificationUri" -Write-Host "📝 Enter code: $userCode" -Write-Host "" - -# Poll for token (with timeout) -$timeout = 900 # 15 minutes -$interval = 5 -$elapsed = 0 - -while ($elapsed -lt $timeout) { - try { - $tokenResponse = Invoke-RestMethod -Uri "http://localhost:3000/api/v1/hytale/oauth/token" ` - -Method Post ` - -Headers @{"Content-Type" = "application/json"} ` - -Body "{`"device_code`": `"$deviceCode`"}" - - if ($tokenResponse.access_token) { - $accessToken = $tokenResponse.access_token - $refreshToken = $tokenResponse.refresh_token - - Write-Host "✅ Authorization successful!" - Add-Content -Path ".env.local" -Value "ACCESS_TOKEN=$accessToken" - Add-Content -Path ".env.local" -Value "REFRESH_TOKEN=$refreshToken" - break - } - } catch { - # 401 AUTHORIZATION_PENDING is expected until user authorizes - } - - Start-Sleep -Seconds $interval - $elapsed += $interval -} - -if ($elapsed -ge $timeout) { - Write-Host "❌ Authorization timeout (15 min exceeded)" - exit 1 -} -``` - -### Option B: Token Refresh (Automated CI/CD) - -For automated provisioning using stored refresh token: - -**Bash Example:** -```bash -#!/bin/bash - -# Load stored refresh token -REFRESH_TOKEN=$(cat .env.local | grep REFRESH_TOKEN | cut -d= -f2) - -if [ -z "$REFRESH_TOKEN" ]; then - echo "❌ No refresh token found. Run device code flow first." 
- exit 1 -fi - -# Refresh token (valid for 30 days) -TOKEN_RESPONSE=$(curl -s -X POST http://localhost:3000/api/v1/hytale/oauth/refresh \ - -H "Content-Type: application/json" \ - -d "{\"refresh_token\": \"$REFRESH_TOKEN\"}") - -if echo "$TOKEN_RESPONSE" | jq -e '.access_token' > /dev/null 2>&1; then - NEW_ACCESS_TOKEN=$(echo "$TOKEN_RESPONSE" | jq -r '.access_token') - NEW_REFRESH_TOKEN=$(echo "$TOKEN_RESPONSE" | jq -r '.refresh_token') - - # Update stored tokens - sed -i "s/ACCESS_TOKEN=.*/ACCESS_TOKEN=$NEW_ACCESS_TOKEN/" .env.local - sed -i "s/REFRESH_TOKEN=.*/REFRESH_TOKEN=$NEW_REFRESH_TOKEN/" .env.local - - echo "✅ Token refreshed successfully" - export ACCESS_TOKEN=$NEW_ACCESS_TOKEN -else - echo "❌ Token refresh failed" - echo "$TOKEN_RESPONSE" | jq . - exit 1 -fi -``` - -**PowerShell Example:** -```powershell -# Load stored refresh token -$env_content = Get-Content ".env.local" -Raw -$refreshToken = ($env_content | Select-String "REFRESH_TOKEN=(.+)").Matches[0].Groups[1].Value - -if (-not $refreshToken) { - Write-Host "❌ No refresh token found. Run device code flow first." 
- exit 1 -} - -# Refresh token -try { - $tokenResponse = Invoke-RestMethod -Uri "http://localhost:3000/api/v1/hytale/oauth/refresh" ` - -Method Post ` - -Headers @{"Content-Type" = "application/json"} ` - -Body "{`"refresh_token`": `"$refreshToken`"}" - - $newAccessToken = $tokenResponse.access_token - $newRefreshToken = $tokenResponse.refresh_token - - # Update stored tokens - (Get-Content ".env.local") -replace "ACCESS_TOKEN=.*", "ACCESS_TOKEN=$newAccessToken" | Set-Content ".env.local" - (Get-Content ".env.local") -replace "REFRESH_TOKEN=.*", "REFRESH_TOKEN=$newRefreshToken" | Set-Content ".env.local" - - Write-Host "✅ Token refreshed successfully" - $env:ACCESS_TOKEN = $newAccessToken -} catch { - Write-Host "❌ Token refresh failed" - exit 1 -} -``` - -## Step 2: Configure Downloader CLI - -### Environment Variables - -```bash -# Set these before running downloader CLI - -export HYTALE_TOKEN="your_access_token_here" -export HYTALE_SERVER_PATH="/opt/hytale-server" -export HYTALE_ENVIRONMENT="production" # or "staging" -``` - -### CLI Usage - -```bash -# Download latest server version -./hytale-downloader download \ - --token "$HYTALE_TOKEN" \ - --output "$HYTALE_SERVER_PATH" \ - --version latest - -# Verify downloaded files -./hytale-downloader verify \ - --path "$HYTALE_SERVER_PATH" - -# Extract files -./hytale-downloader extract \ - --input "$HYTALE_SERVER_PATH" \ - --output "$HYTALE_SERVER_PATH/extracted" -``` - -## Step 3: Integration Examples - -### Terraform Provisioning - -```hcl -# variables.tf -variable "nodebyte_api_url" { - default = "http://localhost:3000" -} - -variable "refresh_token" { - sensitive = true - # Loaded from environment: TF_VAR_refresh_token -} - -# main.tf -resource "null_resource" "download_hytale" { - provisioner "local-exec" { - command = <<-EOT - set -e - - # Refresh OAuth token - TOKEN_RESPONSE=$(curl -s -X POST "${var.nodebyte_api_url}/api/v1/hytale/oauth/refresh" \ - -H "Content-Type: application/json" \ - -d '{"refresh_token": 
"${var.refresh_token}"}') - - ACCESS_TOKEN=$(echo "$TOKEN_RESPONSE" | jq -r '.access_token') - - # Download server files - ./hytale-downloader download \ - --token "$ACCESS_TOKEN" \ - --output "/opt/hytale-server" \ - --version latest - - # Verify installation - if [ -f /opt/hytale-server/server.jar ]; then - echo "✅ Server files ready" - else - echo "❌ Download verification failed" - exit 1 - fi - EOT - } -} - -# terraform.tfvars or environment -# TF_VAR_refresh_token="refresh_eyJhbGc..." -``` - -### Ansible Playbook - -```yaml ---- -- name: Provision Hytale Server - hosts: new_servers - vars: - nodebyte_api_url: "http://nodebyte.example.com" - # Vault-encrypted refresh token - tasks: - - name: Refresh OAuth token - uri: - url: "{{ nodebyte_api_url }}/api/v1/hytale/oauth/refresh" - method: POST - body_format: json - body: - refresh_token: "{{ refresh_token }}" - register: token_response - changed_when: false - - - name: Extract access token - set_fact: - access_token: "{{ token_response.json.access_token }}" - new_refresh_token: "{{ token_response.json.refresh_token }}" - - - name: Update stored refresh token - copy: - content: "{{ new_refresh_token }}" - dest: "/etc/hytale/refresh_token" - mode: "0600" - - - name: Download Hytale server files - shell: | - ./hytale-downloader download \ - --token "{{ access_token }}" \ - --output "/opt/hytale-server" \ - --version latest - environment: - PATH: "/usr/local/bin:{{ ansible_env.PATH }}" - - - name: Verify installation - stat: - path: "/opt/hytale-server/server.jar" - register: server_jar - failed_when: not server_jar.stat.exists -``` - -### GitHub Actions CI/CD - -```yaml -name: Deploy Hytale Server - -on: - schedule: - - cron: "0 0 * * 0" # Weekly - workflow_dispatch: - -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Refresh OAuth token - id: token - run: | - RESPONSE=$(curl -s -X POST "${{ secrets.NODEBYTE_API_URL }}/api/v1/hytale/oauth/refresh" \ - -H "Content-Type: 
application/json" \ - -d "{\"refresh_token\": \"${{ secrets.HYTALE_REFRESH_TOKEN }}\"}") - - ACCESS_TOKEN=$(echo "$RESPONSE" | jq -r '.access_token') - NEW_REFRESH=$(echo "$RESPONSE" | jq -r '.refresh_token') - - echo "::set-output name=access_token::$ACCESS_TOKEN" - echo "::add-mask::$ACCESS_TOKEN" - - # Store new refresh token - echo "new_refresh=$NEW_REFRESH" >> $GITHUB_OUTPUT - - - name: Update refresh token secret - run: | - echo "TODO: Update GitHub Actions secret (requires API token)" - # This step would use GitHub API to update the secret - # Requires: gh auth token + gh secret set - - - name: Download Hytale server - run: | - wget https://downloader.hytale.com/hytale-downloader.zip - unzip hytale-downloader.zip - - ./hytale-downloader download \ - --token "${{ steps.token.outputs.access_token }}" \ - --output "./server" \ - --version latest - - - name: Verify and package - run: | - ./hytale-downloader verify --path "./server" - tar czf hytale-server.tar.gz server/ - - - name: Upload to artifact storage - run: | - aws s3 cp hytale-server.tar.gz s3://deployments/hytale/ -``` - -### Docker Build Integration - -```dockerfile -# Dockerfile -FROM ubuntu:22.04 - -# Install dependencies -RUN apt-get update && apt-get install -y \ - curl \ - jq \ - unzip - -# Download Downloader CLI -RUN wget https://downloader.hytale.com/hytale-downloader.zip && \ - unzip hytale-downloader.zip && \ - chmod +x hytale-downloader - -# Build arguments (passed from CI/CD) -ARG HYTALE_TOKEN -ARG HYTALE_VERSION=latest - -# Download server files -RUN ./hytale-downloader download \ - --token "$HYTALE_TOKEN" \ - --output "/opt/hytale-server" \ - --version "$HYTALE_VERSION" - -# Verify -RUN ./hytale-downloader verify --path "/opt/hytale-server" - -# Copy configuration -COPY server.properties /opt/hytale-server/ -COPY start.sh /opt/hytale-server/ - -# Set permissions -RUN chmod +x /opt/hytale-server/start.sh && \ - useradd -m hytale && \ - chown -R hytale:hytale /opt/hytale-server - -USER 
hytale -WORKDIR /opt/hytale-server - -EXPOSE 25565 - -CMD ["/opt/hytale-server/start.sh"] -``` - -**Build command:** -```bash -# Get fresh access token -TOKEN_RESPONSE=$(curl -s -X POST http://localhost:3000/api/v1/hytale/oauth/refresh \ - -H "Content-Type: application/json" \ - -d "{\"refresh_token\": \"$REFRESH_TOKEN\"}") - -ACCESS_TOKEN=$(echo "$TOKEN_RESPONSE" | jq -r '.access_token') - -# Build with token -docker build \ - --build-arg HYTALE_TOKEN="$ACCESS_TOKEN" \ - --build-arg HYTALE_VERSION="1.0.2" \ - -t hytale-server:latest . -``` - -## Step 4: Error Handling & Retries - -### Token Refresh Failures - -```bash -#!/bin/bash - -refresh_token_with_retry() { - local refresh_token=$1 - local max_attempts=3 - local attempt=1 - - while [ $attempt -le $max_attempts ]; do - RESPONSE=$(curl -s -w "\n%{http_code}" -X POST http://localhost:3000/api/v1/hytale/oauth/refresh \ - -H "Content-Type: application/json" \ - -d "{\"refresh_token\": \"$refresh_token\"}") - - HTTP_CODE=$(echo "$RESPONSE" | tail -n 1) - BODY=$(echo "$RESPONSE" | head -n -1) - - case $HTTP_CODE in - 200) - echo "$BODY" - return 0 - ;; - 401) - echo "❌ Refresh token expired or invalid" >&2 - echo " Need to re-run device code flow" >&2 - return 1 - ;; - 429) - RETRY_AFTER=$(echo "$BODY" | jq -r '.retry_after // 60') - echo "⏳ Rate limited. Waiting ${RETRY_AFTER}s..." >&2 - sleep $RETRY_AFTER - attempt=$((attempt + 1)) - ;; - *) - echo "❌ HTTP $HTTP_CODE" >&2 - sleep $((2 ** (attempt - 1))) # Exponential backoff - attempt=$((attempt + 1)) - ;; - esac - done - - echo "❌ Token refresh failed after $max_attempts attempts" >&2 - return 1 -} - -# Usage -if ! 
TOKEN_JSON=$(refresh_token_with_retry "$REFRESH_TOKEN"); then - exit 1 -fi - -ACCESS_TOKEN=$(echo "$TOKEN_JSON" | jq -r '.access_token') -``` - -### Downloader CLI Failures - -```bash -#!/bin/bash - -download_with_retry() { - local token=$1 - local output=$2 - local max_attempts=3 - local attempt=1 - - while [ $attempt -le $max_attempts ]; do - if ./hytale-downloader download \ - --token "$token" \ - --output "$output" \ - --version latest 2>&1; then - return 0 - fi - - DELAY=$((2 ** (attempt - 1))) - echo "⏳ Download failed, retrying in ${DELAY}s... (attempt $attempt/$max_attempts)" >&2 - sleep $DELAY - attempt=$((attempt + 1)) - done - - echo "❌ Download failed after $max_attempts attempts" >&2 - return 1 -} - -# Usage -if ! download_with_retry "$ACCESS_TOKEN" "/opt/hytale-server"; then - exit 1 -fi - -# Verify -if ! ./hytale-downloader verify --path "/opt/hytale-server"; then - echo "❌ Download verification failed" >&2 - exit 1 -fi -``` - -## Troubleshooting - -### "Refresh token expired or invalid" -- Refresh tokens valid for 30 days -- Need to re-run device code flow if expired -- Solution: Implement token rotation in CI/CD pipeline - -### "Rate limited (429)" -- Exceeded token refresh quota (6/hour) -- Wait for X-RateLimit-Reset time -- Use stored access tokens if available (valid 1 hour) - -### "Session limit exceeded (403)" -- Account reached 100 concurrent sessions -- Solution: User needs to upgrade to `sessions.unlimited_servers` entitlement -- Check `account_id` in token response - -### "Authorization pending" -- User hasn't entered device code yet -- Implementation should: retry after 5 seconds per RFC 8628 -- Check user has navigated to verification_uri - -### "Connection refused" -- NodeByte API not running -- Check API URL and firewall -- Verify OAuth endpoints accessible - -## Security Best Practices - -1. 
**Token Storage** - ```bash - # ✅ Secure storage - chmod 600 .env.local # Read/write for owner only - - # ❌ Avoid - # Don't commit tokens to git - # Don't print tokens in logs - # Don't pass via command line (visible in ps) - ``` - -2. **CI/CD Secrets** - ```yaml - # GitHub Actions - - name: Use token - env: - HYTALE_REFRESH_TOKEN: ${{ secrets.HYTALE_REFRESH_TOKEN }} - run: | - # Token masked from logs automatically - ./deploy.sh - ``` - -3. **Network Security** - - Use HTTPS (not HTTP) for all API calls - - Verify TLS certificates in production - - Restrict API access via firewall rules - -4. **Token Rotation** - - New refresh token on every refresh - - Old token automatically invalidated - - Store new token immediately - -5. **Audit Logging** - - All downloads logged to audit trail - - Check `GET /admin/audit-logs?account_id=` - - Review for suspicious activity - -## Summary - -The Hytale Downloader CLI integration enables: - -- ✅ Automated, secure server provisioning -- ✅ Integration into existing CI/CD pipelines -- ✅ Zero-touch updates via scheduled jobs -- ✅ Compliance-audited downloads -- ✅ Multi-stage/production support - -Refer to [HYTALE_AUTH_FLOW.md](HYTALE_AUTH_FLOW.md) for customer-facing authentication documentation and [HYTALE_GSP_API.md](HYTALE_GSP_API.md) for complete API reference. 
- diff --git a/docs/docs.go b/docs/docs.go index 22c1c28..96ba837 100644 --- a/docs/docs.go +++ b/docs/docs.go @@ -1071,6 +1071,891 @@ const docTemplate = `{ } } }, + "/api/v1/auth/check-email": { + "get": { + "description": "Checks if an email address is already registered in the system", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Check Email Exists", + "parameters": [ + { + "type": "string", + "description": "Email address to check", + "name": "email", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "Email availability status", + "schema": { + "type": "object", + "additionalProperties": true + } + }, + "400": { + "description": "Invalid email", + "schema": { + "type": "object", + "additionalProperties": true + } + } + } + } + }, + "/api/v1/auth/forgot-password": { + "post": { + "description": "Sends password reset email with reset token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Forgot Password", + "parameters": [ + { + "description": "User email", + "name": "email", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.ForgotPasswordRequest" + } + } + ], + "responses": { + "200": { + "description": "Reset email sent", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid email", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/login": { + "post": { + "description": "Authenticates a user with email and password, returns JWT tokens", + "consumes": [ + "application/json" + ], 
+ "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Login", + "parameters": [ + { + "description": "Login credentials", + "name": "credentials", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.CredentialsRequest" + } + } + ], + "responses": { + "200": { + "description": "Login successful with JWT tokens", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Invalid credentials or email not verified", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/logout": { + "post": { + "description": "Invalidates refresh token and terminates user session", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Logout", + "parameters": [ + { + "description": "Optional refresh token to invalidate", + "name": "logout", + "in": "body", + "schema": { + "$ref": "#/definitions/handlers.LogoutRequest" + } + }, + { + "type": "string", + "example": "Bearer eyJhbGc...", + "description": "Bearer token", + "name": "Authorization", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Logged out successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/magic-link": { + "post": { + "description": "Sends a passwordless authentication magic link to user's email", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Request Magic Link", + "parameters": [ + { + "description": "User email", + "name": "magicLink", + "in": "body", + "required": true, + 
"schema": { + "$ref": "#/definitions/handlers.MagicLinkRequest" + } + } + ], + "responses": { + "200": { + "description": "Magic link sent", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid email", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/magic-link/verify": { + "post": { + "description": "Verifies magic link token and authenticates user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Verify Magic Link", + "parameters": [ + { + "description": "Magic link token", + "name": "verify", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.MagicLinkVerifyRequest" + } + } + ], + "responses": { + "200": { + "description": "Authentication successful with JWT tokens", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Invalid or expired token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/me": { + "get": { + "description": "Returns authenticated user information from JWT token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get Current User", + "parameters": [ + { + "type": "string", + "example": "Bearer eyJhbGc...", + "description": "Bearer token", + "name": "Authorization", + 
"in": "header", + "required": true + } + ], + "responses": { + "200": { + "description": "User data", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Missing or invalid token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/refresh": { + "post": { + "description": "Exchanges a valid refresh token for new access and refresh tokens", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Refresh Access Token", + "parameters": [ + { + "description": "Refresh token", + "name": "refresh", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.RefreshTokenRequest" + } + } + ], + "responses": { + "200": { + "description": "New tokens generated", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Missing refresh token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Invalid or expired refresh token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/register": { + "post": { + "description": "Registers a new user account and sends verification email", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Registration", + "parameters": [ + { + "description": "Registration details", + "name": "registration", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.RegisterUserRequest" + } + } + ], + "responses": { + "201": { + "description": "User registered successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid request 
or validation error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "409": { + "description": "Email already exists", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/reset-password": { + "post": { + "description": "Resets user password using reset token from email", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Reset Password", + "parameters": [ + { + "description": "Reset token and new password", + "name": "reset", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.ResetPasswordRequest" + } + } + ], + "responses": { + "200": { + "description": "Password reset successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid request or weak password", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found or invalid token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/users/{id}": { + "get": { + "description": "Retrieves user information by user ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get User By ID", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "User information", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": 
"#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/validate": { + "post": { + "description": "Validates user credentials without creating a session", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Validate Credentials", + "parameters": [ + { + "description": "Credentials to validate", + "name": "credentials", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.CredentialsValidateRequest" + } + } + ], + "responses": { + "200": { + "description": "Credentials are valid", + "schema": { + "type": "object", + "additionalProperties": true + } + }, + "400": { + "description": "Invalid request", + "schema": { + "type": "object", + "additionalProperties": true + } + }, + "401": { + "description": "Invalid credentials", + "schema": { + "type": "object", + "additionalProperties": true + } + } + } + } + }, + "/api/v1/auth/verify-email": { + "post": { + "description": "Verifies user email address with verification token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Verify Email", + "parameters": [ + { + "description": "Verification token", + "name": "verification", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "Email verified successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid or missing token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/dashboard/account": { + 
"get": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Retrieves account information for the authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Get user account", + "responses": { + "200": { + "description": "Account info retrieved", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + }, + "put": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Updates account information for the authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Update user account", + "parameters": [ + { + "description": "Account update data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.UpdateUserAccountRequest" + } + } + ], + "responses": { + "200": { + "description": "Account updated", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, + "/api/v1/dashboard/account/password": { + "put": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Changes password for the authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Change user password", + 
"parameters": [ + { + "description": "Password change data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.ChangePasswordRequest" + } + } + ], + "responses": { + "200": { + "description": "Password changed", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized or wrong password", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, + "/api/v1/dashboard/servers": { + "get": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Retrieves paginated list of servers owned by the authenticated user with search and filtering", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Get user servers", + "parameters": [ + { + "type": "integer", + "default": 1, + "description": "Page number", + "name": "page", + "in": "query" + }, + { + "type": "integer", + "default": 12, + "description": "Items per page", + "name": "per_page", + "in": "query" + }, + { + "type": "string", + "description": "Search query", + "name": "search", + "in": "query" + }, + { + "type": "string", + "description": "Status filter", + "name": "status", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Servers retrieved", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, + "/api/v1/dashboard/stats": { + "get": { + "security": [ + { + 
"BearerAuth": [] + } + ], + "description": "Retrieves statistics for the user's dashboard including server counts and recent servers", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Get dashboard stats", + "responses": { + "200": { + "description": "Dashboard stats retrieved", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, "/api/v1/email/queue": { "post": { "security": [ @@ -2315,6 +3200,93 @@ const docTemplate = `{ } }, "definitions": { + "auth.TokenPair": { + "type": "object", + "properties": { + "accessToken": { + "type": "string" + }, + "expiresIn": { + "description": "seconds until access token expires", + "type": "integer" + }, + "refreshToken": { + "type": "string" + }, + "tokenType": { + "type": "string" + } + } + }, + "handlers.AuthResponse": { + "type": "object", + "properties": { + "accessToken": { + "description": "For backward compatibility", + "type": "string" + }, + "error": { + "type": "string" + }, + "expiresIn": { + "description": "For backward compatibility", + "type": "integer" + }, + "message": { + "type": "string" + }, + "refreshToken": { + "description": "For backward compatibility", + "type": "string" + }, + "success": { + "type": "boolean" + }, + "token": { + "description": "Deprecated: use tokens instead", + "type": "string" + }, + "tokens": { + "$ref": "#/definitions/auth.TokenPair" + }, + "user": { + "$ref": "#/definitions/handlers.UserData" + } + } + }, + "handlers.ChangePasswordRequest": { + "type": "object", + "properties": { + "currentPassword": { + "type": "string" + }, + "newPassword": { + "type": "string" + } + } + }, + "handlers.CredentialsRequest": { + "type": "object", + 
"properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "handlers.CredentialsValidateRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, "handlers.DispatchWebhookRequest": { "type": "object", "properties": { @@ -2341,6 +3313,38 @@ const docTemplate = `{ } } }, + "handlers.ForgotPasswordRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + } + } + }, + "handlers.LogoutRequest": { + "type": "object", + "properties": { + "refreshToken": { + "type": "string" + } + } + }, + "handlers.MagicLinkRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + } + } + }, + "handlers.MagicLinkVerifyRequest": { + "type": "object", + "properties": { + "token": { + "type": "string" + } + } + }, "handlers.QueueEmailRequest": { "type": "object", "properties": { @@ -2361,6 +3365,54 @@ const docTemplate = `{ } } }, + "handlers.RefreshTokenRequest": { + "type": "object", + "properties": { + "refreshToken": { + "type": "string" + } + } + }, + "handlers.RegisterUserRequest": { + "type": "object", + "properties": { + "confirmPassword": { + "type": "string" + }, + "email": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "lastName": { + "type": "string" + }, + "password": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "handlers.ResetPasswordRequest": { + "type": "object", + "properties": { + "confirmPassword": { + "type": "string" + }, + "id": { + "type": "string" + }, + "password": { + "type": "string" + }, + "token": { + "type": "string" + } + } + }, "handlers.SuccessResponse": { "type": "object", "properties": { @@ -2490,6 +3542,64 @@ const docTemplate = `{ } } }, + "handlers.UpdateUserAccountRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "lastName": { + "type": "string" + }, + 
"username": { + "type": "string" + } + } + }, + "handlers.UserData": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "emailVerified": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isPterodactylAdmin": { + "type": "boolean" + }, + "isSystemAdmin": { + "type": "boolean" + }, + "isVirtfusionAdmin": { + "type": "boolean" + }, + "lastName": { + "type": "string" + }, + "pterodactylId": { + "type": "integer" + }, + "roles": { + "type": "array", + "items": { + "type": "string" + } + }, + "username": { + "type": "string" + } + } + }, "types.CreateGameSessionRequest": { "type": "object", "properties": { @@ -2502,6 +3612,11 @@ const docTemplate = `{ "description": "Profile/character UUID (optional if previously selected)", "type": "string", "example": "550e8400-e29b-41d4-a716-446655440001" + }, + "server_id": { + "description": "Server ID to link this session to (optional)", + "type": "string", + "example": "srv_abc123" } } }, diff --git a/docs/swagger.json b/docs/swagger.json index cadd0fd..332704d 100644 --- a/docs/swagger.json +++ b/docs/swagger.json @@ -1069,6 +1069,891 @@ } } }, + "/api/v1/auth/check-email": { + "get": { + "description": "Checks if an email address is already registered in the system", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Check Email Exists", + "parameters": [ + { + "type": "string", + "description": "Email address to check", + "name": "email", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "Email availability status", + "schema": { + "type": "object", + "additionalProperties": true + } + }, + "400": { + "description": "Invalid email", + "schema": { + "type": "object", + "additionalProperties": true + } + } + } + } + }, + "/api/v1/auth/forgot-password": { + "post": { + "description": "Sends password reset email with reset token", 
+ "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Forgot Password", + "parameters": [ + { + "description": "User email", + "name": "email", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.ForgotPasswordRequest" + } + } + ], + "responses": { + "200": { + "description": "Reset email sent", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid email", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/login": { + "post": { + "description": "Authenticates a user with email and password, returns JWT tokens", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Login", + "parameters": [ + { + "description": "Login credentials", + "name": "credentials", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.CredentialsRequest" + } + } + ], + "responses": { + "200": { + "description": "Login successful with JWT tokens", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Invalid credentials or email not verified", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/logout": { + "post": { + "description": "Invalidates refresh token and terminates user session", + "consumes": [ 
+ "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Logout", + "parameters": [ + { + "description": "Optional refresh token to invalidate", + "name": "logout", + "in": "body", + "schema": { + "$ref": "#/definitions/handlers.LogoutRequest" + } + }, + { + "type": "string", + "example": "Bearer eyJhbGc...", + "description": "Bearer token", + "name": "Authorization", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Logged out successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/magic-link": { + "post": { + "description": "Sends a passwordless authentication magic link to user's email", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Request Magic Link", + "parameters": [ + { + "description": "User email", + "name": "magicLink", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.MagicLinkRequest" + } + } + ], + "responses": { + "200": { + "description": "Magic link sent", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid email", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/magic-link/verify": { + "post": { + "description": "Verifies magic link token and authenticates user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Verify Magic Link", + "parameters": [ + { + "description": "Magic link token", + "name": "verify", + "in": "body", + "required": true, + "schema": { + 
"$ref": "#/definitions/handlers.MagicLinkVerifyRequest" + } + } + ], + "responses": { + "200": { + "description": "Authentication successful with JWT tokens", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Invalid or expired token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/me": { + "get": { + "description": "Returns authenticated user information from JWT token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get Current User", + "parameters": [ + { + "type": "string", + "example": "Bearer eyJhbGc...", + "description": "Bearer token", + "name": "Authorization", + "in": "header", + "required": true + } + ], + "responses": { + "200": { + "description": "User data", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Missing or invalid token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/refresh": { + "post": { + "description": "Exchanges a valid refresh token for new access and refresh tokens", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Refresh Access Token", + "parameters": [ + { + "description": "Refresh token", + "name": "refresh", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.RefreshTokenRequest" + } + } + ], + "responses": { + "200": { + "description": "New tokens generated", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Missing refresh token", + 
"schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "401": { + "description": "Invalid or expired refresh token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/register": { + "post": { + "description": "Registers a new user account and sends verification email", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "User Registration", + "parameters": [ + { + "description": "Registration details", + "name": "registration", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.RegisterUserRequest" + } + } + ], + "responses": { + "201": { + "description": "User registered successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid request or validation error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "409": { + "description": "Email already exists", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/reset-password": { + "post": { + "description": "Resets user password using reset token from email", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Reset Password", + "parameters": [ + { + "description": "Reset token and new password", + "name": "reset", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.ResetPasswordRequest" + } + } + ], + "responses": { + "200": { + "description": "Password reset successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { 
+ "description": "Invalid request or weak password", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found or invalid token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/users/{id}": { + "get": { + "description": "Retrieves user information by user ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Get User By ID", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "User information", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/auth/validate": { + "post": { + "description": "Validates user credentials without creating a session", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Validate Credentials", + "parameters": [ + { + "description": "Credentials to validate", + "name": "credentials", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.CredentialsValidateRequest" + } + } + ], + "responses": { + "200": { + "description": "Credentials are valid", + "schema": { + "type": "object", + "additionalProperties": true + } + }, + "400": { + "description": "Invalid request", + "schema": { + "type": "object", + "additionalProperties": true + } + }, + "401": { + "description": "Invalid credentials", + "schema": { + "type": "object", + "additionalProperties": true + } + } + } + } + }, + "/api/v1/auth/verify-email": { + "post": { + "description": 
"Verifies user email address with verification token", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Authentication" + ], + "summary": "Verify Email", + "parameters": [ + { + "description": "Verification token", + "name": "verification", + "in": "body", + "required": true, + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ], + "responses": { + "200": { + "description": "Email verified successfully", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "400": { + "description": "Invalid or missing token", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "404": { + "description": "User not found", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.AuthResponse" + } + } + } + } + }, + "/api/v1/dashboard/account": { + "get": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Retrieves account information for the authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Get user account", + "responses": { + "200": { + "description": "Account info retrieved", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + }, + "put": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Updates account information for the authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Update user account", + "parameters": [ + { + "description": "Account 
update data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.UpdateUserAccountRequest" + } + } + ], + "responses": { + "200": { + "description": "Account updated", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, + "/api/v1/dashboard/account/password": { + "put": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Changes password for the authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Change user password", + "parameters": [ + { + "description": "Password change data", + "name": "payload", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/handlers.ChangePasswordRequest" + } + } + ], + "responses": { + "200": { + "description": "Password changed", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "400": { + "description": "Invalid request", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized or wrong password", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, + "/api/v1/dashboard/servers": { + "get": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Retrieves paginated list of servers owned by the authenticated user with search and filtering", + "consumes": [ + "application/json" + ], + "produces": [ + 
"application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Get user servers", + "parameters": [ + { + "type": "integer", + "default": 1, + "description": "Page number", + "name": "page", + "in": "query" + }, + { + "type": "integer", + "default": 12, + "description": "Items per page", + "name": "per_page", + "in": "query" + }, + { + "type": "string", + "description": "Search query", + "name": "search", + "in": "query" + }, + { + "type": "string", + "description": "Status filter", + "name": "status", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Servers retrieved", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, + "/api/v1/dashboard/stats": { + "get": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Retrieves statistics for the user's dashboard including server counts and recent servers", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Dashboard" + ], + "summary": "Get dashboard stats", + "responses": { + "200": { + "description": "Dashboard stats retrieved", + "schema": { + "$ref": "#/definitions/handlers.SuccessResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + }, + "500": { + "description": "Internal server error", + "schema": { + "$ref": "#/definitions/handlers.ErrorResponse" + } + } + } + } + }, "/api/v1/email/queue": { "post": { "security": [ @@ -2313,6 +3198,93 @@ } }, "definitions": { + "auth.TokenPair": { + "type": "object", + "properties": { + "accessToken": { + "type": "string" + }, + "expiresIn": { + "description": "seconds until access token expires", + "type": "integer" + }, + "refreshToken": { + 
"type": "string" + }, + "tokenType": { + "type": "string" + } + } + }, + "handlers.AuthResponse": { + "type": "object", + "properties": { + "accessToken": { + "description": "For backward compatibility", + "type": "string" + }, + "error": { + "type": "string" + }, + "expiresIn": { + "description": "For backward compatibility", + "type": "integer" + }, + "message": { + "type": "string" + }, + "refreshToken": { + "description": "For backward compatibility", + "type": "string" + }, + "success": { + "type": "boolean" + }, + "token": { + "description": "Deprecated: use tokens instead", + "type": "string" + }, + "tokens": { + "$ref": "#/definitions/auth.TokenPair" + }, + "user": { + "$ref": "#/definitions/handlers.UserData" + } + } + }, + "handlers.ChangePasswordRequest": { + "type": "object", + "properties": { + "currentPassword": { + "type": "string" + }, + "newPassword": { + "type": "string" + } + } + }, + "handlers.CredentialsRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "handlers.CredentialsValidateRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, "handlers.DispatchWebhookRequest": { "type": "object", "properties": { @@ -2339,6 +3311,38 @@ } } }, + "handlers.ForgotPasswordRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + } + } + }, + "handlers.LogoutRequest": { + "type": "object", + "properties": { + "refreshToken": { + "type": "string" + } + } + }, + "handlers.MagicLinkRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + } + } + }, + "handlers.MagicLinkVerifyRequest": { + "type": "object", + "properties": { + "token": { + "type": "string" + } + } + }, "handlers.QueueEmailRequest": { "type": "object", "properties": { @@ -2359,6 +3363,54 @@ } } }, + "handlers.RefreshTokenRequest": { + "type": "object", + "properties": { + 
"refreshToken": { + "type": "string" + } + } + }, + "handlers.RegisterUserRequest": { + "type": "object", + "properties": { + "confirmPassword": { + "type": "string" + }, + "email": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "lastName": { + "type": "string" + }, + "password": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "handlers.ResetPasswordRequest": { + "type": "object", + "properties": { + "confirmPassword": { + "type": "string" + }, + "id": { + "type": "string" + }, + "password": { + "type": "string" + }, + "token": { + "type": "string" + } + } + }, "handlers.SuccessResponse": { "type": "object", "properties": { @@ -2488,6 +3540,64 @@ } } }, + "handlers.UpdateUserAccountRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "lastName": { + "type": "string" + }, + "username": { + "type": "string" + } + } + }, + "handlers.UserData": { + "type": "object", + "properties": { + "email": { + "type": "string" + }, + "emailVerified": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "id": { + "type": "string" + }, + "isPterodactylAdmin": { + "type": "boolean" + }, + "isSystemAdmin": { + "type": "boolean" + }, + "isVirtfusionAdmin": { + "type": "boolean" + }, + "lastName": { + "type": "string" + }, + "pterodactylId": { + "type": "integer" + }, + "roles": { + "type": "array", + "items": { + "type": "string" + } + }, + "username": { + "type": "string" + } + } + }, "types.CreateGameSessionRequest": { "type": "object", "properties": { @@ -2500,6 +3610,11 @@ "description": "Profile/character UUID (optional if previously selected)", "type": "string", "example": "550e8400-e29b-41d4-a716-446655440001" + }, + "server_id": { + "description": "Server ID to link this session to (optional)", + "type": "string", + "example": "srv_abc123" } } }, diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 89d0a98..c0f2b9a 100644 --- 
a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -1,5 +1,63 @@ basePath: / definitions: + auth.TokenPair: + properties: + accessToken: + type: string + expiresIn: + description: seconds until access token expires + type: integer + refreshToken: + type: string + tokenType: + type: string + type: object + handlers.AuthResponse: + properties: + accessToken: + description: For backward compatibility + type: string + error: + type: string + expiresIn: + description: For backward compatibility + type: integer + message: + type: string + refreshToken: + description: For backward compatibility + type: string + success: + type: boolean + token: + description: 'Deprecated: use tokens instead' + type: string + tokens: + $ref: '#/definitions/auth.TokenPair' + user: + $ref: '#/definitions/handlers.UserData' + type: object + handlers.ChangePasswordRequest: + properties: + currentPassword: + type: string + newPassword: + type: string + type: object + handlers.CredentialsRequest: + properties: + email: + type: string + password: + type: string + type: object + handlers.CredentialsValidateRequest: + properties: + email: + type: string + password: + type: string + type: object handlers.DispatchWebhookRequest: properties: data: @@ -17,6 +75,26 @@ definitions: success: type: boolean type: object + handlers.ForgotPasswordRequest: + properties: + email: + type: string + type: object + handlers.LogoutRequest: + properties: + refreshToken: + type: string + type: object + handlers.MagicLinkRequest: + properties: + email: + type: string + type: object + handlers.MagicLinkVerifyRequest: + properties: + token: + type: string + type: object handlers.QueueEmailRequest: properties: data: @@ -30,6 +108,37 @@ definitions: to: type: string type: object + handlers.RefreshTokenRequest: + properties: + refreshToken: + type: string + type: object + handlers.RegisterUserRequest: + properties: + confirmPassword: + type: string + email: + type: string + firstName: + type: string + lastName: + type: string + 
password: + type: string + username: + type: string + type: object + handlers.ResetPasswordRequest: + properties: + confirmPassword: + type: string + id: + type: string + password: + type: string + token: + type: string + type: object handlers.SuccessResponse: properties: data: {} @@ -118,6 +227,44 @@ definitions: autoSyncInterval: type: integer type: object + handlers.UpdateUserAccountRequest: + properties: + email: + type: string + firstName: + type: string + lastName: + type: string + username: + type: string + type: object + handlers.UserData: + properties: + email: + type: string + emailVerified: + type: string + firstName: + type: string + id: + type: string + isPterodactylAdmin: + type: boolean + isSystemAdmin: + type: boolean + isVirtfusionAdmin: + type: boolean + lastName: + type: string + pterodactylId: + type: integer + roles: + items: + type: string + type: array + username: + type: string + type: object types.CreateGameSessionRequest: properties: account_id: @@ -128,6 +275,10 @@ definitions: description: Profile/character UUID (optional if previously selected) example: 550e8400-e29b-41d4-a716-446655440001 type: string + server_id: + description: Server ID to link this session to (optional) + example: srv_abc123 + type: string type: object types.CreateGameSessionResponseDTO: properties: @@ -1083,6 +1234,584 @@ paths: summary: Get public statistics tags: - Public + /api/v1/auth/check-email: + get: + consumes: + - application/json + description: Checks if an email address is already registered in the system + parameters: + - description: Email address to check + in: query + name: email + required: true + type: string + produces: + - application/json + responses: + "200": + description: Email availability status + schema: + additionalProperties: true + type: object + "400": + description: Invalid email + schema: + additionalProperties: true + type: object + summary: Check Email Exists + tags: + - Authentication + /api/v1/auth/forgot-password: + post: + 
consumes: + - application/json + description: Sends password reset email with reset token + parameters: + - description: User email + in: body + name: email + required: true + schema: + $ref: '#/definitions/handlers.ForgotPasswordRequest' + produces: + - application/json + responses: + "200": + description: Reset email sent + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Invalid email + schema: + $ref: '#/definitions/handlers.AuthResponse' + "404": + description: User not found + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Forgot Password + tags: + - Authentication + /api/v1/auth/login: + post: + consumes: + - application/json + description: Authenticates a user with email and password, returns JWT tokens + parameters: + - description: Login credentials + in: body + name: credentials + required: true + schema: + $ref: '#/definitions/handlers.CredentialsRequest' + produces: + - application/json + responses: + "200": + description: Login successful with JWT tokens + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Invalid request + schema: + $ref: '#/definitions/handlers.AuthResponse' + "401": + description: Invalid credentials or email not verified + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: User Login + tags: + - Authentication + /api/v1/auth/logout: + post: + consumes: + - application/json + description: Invalidates refresh token and terminates user session + parameters: + - description: Optional refresh token to invalidate + in: body + name: logout + schema: + $ref: '#/definitions/handlers.LogoutRequest' + - description: Bearer token + example: Bearer eyJhbGc... 
+ in: header + name: Authorization + type: string + produces: + - application/json + responses: + "200": + description: Logged out successfully + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: User Logout + tags: + - Authentication + /api/v1/auth/magic-link: + post: + consumes: + - application/json + description: Sends a passwordless authentication magic link to user's email + parameters: + - description: User email + in: body + name: magicLink + required: true + schema: + $ref: '#/definitions/handlers.MagicLinkRequest' + produces: + - application/json + responses: + "200": + description: Magic link sent + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Invalid email + schema: + $ref: '#/definitions/handlers.AuthResponse' + "404": + description: User not found + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Request Magic Link + tags: + - Authentication + /api/v1/auth/magic-link/verify: + post: + consumes: + - application/json + description: Verifies magic link token and authenticates user + parameters: + - description: Magic link token + in: body + name: verify + required: true + schema: + $ref: '#/definitions/handlers.MagicLinkVerifyRequest' + produces: + - application/json + responses: + "200": + description: Authentication successful with JWT tokens + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Invalid request + schema: + $ref: '#/definitions/handlers.AuthResponse' + "401": + description: Invalid or expired token + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Verify Magic Link + tags: + - Authentication + /api/v1/auth/me: + get: + consumes: + - application/json + description: Returns authenticated user information from JWT token + parameters: 
+ - description: Bearer token + example: Bearer eyJhbGc... + in: header + name: Authorization + required: true + type: string + produces: + - application/json + responses: + "200": + description: User data + schema: + $ref: '#/definitions/handlers.AuthResponse' + "401": + description: Missing or invalid token + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Get Current User + tags: + - Authentication + /api/v1/auth/refresh: + post: + consumes: + - application/json + description: Exchanges a valid refresh token for new access and refresh tokens + parameters: + - description: Refresh token + in: body + name: refresh + required: true + schema: + $ref: '#/definitions/handlers.RefreshTokenRequest' + produces: + - application/json + responses: + "200": + description: New tokens generated + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Missing refresh token + schema: + $ref: '#/definitions/handlers.AuthResponse' + "401": + description: Invalid or expired refresh token + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Refresh Access Token + tags: + - Authentication + /api/v1/auth/register: + post: + consumes: + - application/json + description: Registers a new user account and sends verification email + parameters: + - description: Registration details + in: body + name: registration + required: true + schema: + $ref: '#/definitions/handlers.RegisterUserRequest' + produces: + - application/json + responses: + "201": + description: User registered successfully + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Invalid request or validation error + schema: + $ref: '#/definitions/handlers.AuthResponse' + "409": + description: Email already exists + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: 
'#/definitions/handlers.AuthResponse' + summary: User Registration + tags: + - Authentication + /api/v1/auth/reset-password: + post: + consumes: + - application/json + description: Resets user password using reset token from email + parameters: + - description: Reset token and new password + in: body + name: reset + required: true + schema: + $ref: '#/definitions/handlers.ResetPasswordRequest' + produces: + - application/json + responses: + "200": + description: Password reset successfully + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Invalid request or weak password + schema: + $ref: '#/definitions/handlers.AuthResponse' + "404": + description: User not found or invalid token + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Reset Password + tags: + - Authentication + /api/v1/auth/users/{id}: + get: + consumes: + - application/json + description: Retrieves user information by user ID + parameters: + - description: User ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: User information + schema: + $ref: '#/definitions/handlers.AuthResponse' + "404": + description: User not found + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Get User By ID + tags: + - Authentication + /api/v1/auth/validate: + post: + consumes: + - application/json + description: Validates user credentials without creating a session + parameters: + - description: Credentials to validate + in: body + name: credentials + required: true + schema: + $ref: '#/definitions/handlers.CredentialsValidateRequest' + produces: + - application/json + responses: + "200": + description: Credentials are valid + schema: + additionalProperties: true + type: object + "400": + description: Invalid request + schema: + additionalProperties: true + type: object + "401": + 
description: Invalid credentials + schema: + additionalProperties: true + type: object + summary: Validate Credentials + tags: + - Authentication + /api/v1/auth/verify-email: + post: + consumes: + - application/json + description: Verifies user email address with verification token + parameters: + - description: Verification token + in: body + name: verification + required: true + schema: + additionalProperties: + type: string + type: object + produces: + - application/json + responses: + "200": + description: Email verified successfully + schema: + $ref: '#/definitions/handlers.AuthResponse' + "400": + description: Invalid or missing token + schema: + $ref: '#/definitions/handlers.AuthResponse' + "404": + description: User not found + schema: + $ref: '#/definitions/handlers.AuthResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.AuthResponse' + summary: Verify Email + tags: + - Authentication + /api/v1/dashboard/account: + get: + consumes: + - application/json + description: Retrieves account information for the authenticated user + produces: + - application/json + responses: + "200": + description: Account info retrieved + schema: + $ref: '#/definitions/handlers.SuccessResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/handlers.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.ErrorResponse' + security: + - BearerAuth: [] + summary: Get user account + tags: + - Dashboard + put: + consumes: + - application/json + description: Updates account information for the authenticated user + parameters: + - description: Account update data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/handlers.UpdateUserAccountRequest' + produces: + - application/json + responses: + "200": + description: Account updated + schema: + $ref: '#/definitions/handlers.SuccessResponse' + "400": + description: Invalid request + schema: + $ref: 
'#/definitions/handlers.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/handlers.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.ErrorResponse' + security: + - BearerAuth: [] + summary: Update user account + tags: + - Dashboard + /api/v1/dashboard/account/password: + put: + consumes: + - application/json + description: Changes password for the authenticated user + parameters: + - description: Password change data + in: body + name: payload + required: true + schema: + $ref: '#/definitions/handlers.ChangePasswordRequest' + produces: + - application/json + responses: + "200": + description: Password changed + schema: + $ref: '#/definitions/handlers.SuccessResponse' + "400": + description: Invalid request + schema: + $ref: '#/definitions/handlers.ErrorResponse' + "401": + description: Unauthorized or wrong password + schema: + $ref: '#/definitions/handlers.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.ErrorResponse' + security: + - BearerAuth: [] + summary: Change user password + tags: + - Dashboard + /api/v1/dashboard/servers: + get: + consumes: + - application/json + description: Retrieves paginated list of servers owned by the authenticated + user with search and filtering + parameters: + - default: 1 + description: Page number + in: query + name: page + type: integer + - default: 12 + description: Items per page + in: query + name: per_page + type: integer + - description: Search query + in: query + name: search + type: string + - description: Status filter + in: query + name: status + type: string + produces: + - application/json + responses: + "200": + description: Servers retrieved + schema: + $ref: '#/definitions/handlers.SuccessResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/handlers.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: 
'#/definitions/handlers.ErrorResponse' + security: + - BearerAuth: [] + summary: Get user servers + tags: + - Dashboard + /api/v1/dashboard/stats: + get: + consumes: + - application/json + description: Retrieves statistics for the user's dashboard including server + counts and recent servers + produces: + - application/json + responses: + "200": + description: Dashboard stats retrieved + schema: + $ref: '#/definitions/handlers.SuccessResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/handlers.ErrorResponse' + "500": + description: Internal server error + schema: + $ref: '#/definitions/handlers.ErrorResponse' + security: + - BearerAuth: [] + summary: Get dashboard stats + tags: + - Dashboard /api/v1/email/queue: post: consumes: diff --git a/go.mod b/go.mod index 4909808..780180f 100644 --- a/go.mod +++ b/go.mod @@ -6,10 +6,12 @@ require ( github.com/getsentry/sentry-go v0.40.0 github.com/getsentry/sentry-go/fiber v0.40.0 github.com/gofiber/fiber/v2 v2.52.9 + github.com/golang-jwt/jwt/v5 v5.3.0 github.com/google/uuid v1.6.0 github.com/hibiken/asynq v0.24.1 github.com/jackc/pgx/v5 v5.7.2 github.com/joho/godotenv v1.5.1 + github.com/lib/pq v1.10.9 github.com/robfig/cron/v3 v3.0.1 github.com/rs/zerolog v1.33.0 github.com/swaggo/swag v1.16.6 diff --git a/go.sum b/go.sum index 52f1d81..fd38dea 100644 --- a/go.sum +++ b/go.sum @@ -36,6 +36,8 @@ github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/ github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gofiber/fiber/v2 v2.52.9 h1:YjKl5DOiyP3j0mO61u3NTmK7or8GzzWzCFzkboyP5cw= github.com/gofiber/fiber/v2 v2.52.9/go.mod h1:YEcBbO/FB+5M1IZNBP9FO3J9281zgPAreiI1oqg8nDw= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= 
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= @@ -69,6 +71,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= diff --git a/internal/auth/jwt.go b/internal/auth/jwt.go new file mode 100644 index 0000000..1e744c0 --- /dev/null +++ b/internal/auth/jwt.go @@ -0,0 +1,124 @@ +package auth + +import ( + "crypto/rand" + "encoding/hex" + "errors" + "time" + + "github.com/golang-jwt/jwt/v5" +) + +// JWTService handles JWT token generation and validation +type JWTService struct { + secretKey []byte + accessTokenTTL time.Duration + refreshTokenTTL time.Duration +} + +// Claims represents JWT claims +type Claims struct { + UserID string `json:"id"` + Email string `json:"email"` + Username string `json:"username"` + FirstName *string `json:"firstName,omitempty"` + LastName *string `json:"lastName,omitempty"` + Roles []string `json:"roles"` + IsPterodactylAdmin bool `json:"isPterodactylAdmin"` + IsVirtfusionAdmin bool `json:"isVirtfusionAdmin"` + IsSystemAdmin bool `json:"isSystemAdmin"` + PterodactylID *int `json:"pterodactylId,omitempty"` + EmailVerified *string `json:"emailVerified,omitempty"` + jwt.RegisteredClaims +} + +// 
TokenPair represents access and refresh tokens +type TokenPair struct { + AccessToken string `json:"accessToken"` + RefreshToken string `json:"refreshToken"` + ExpiresIn int64 `json:"expiresIn"` // seconds until access token expires + TokenType string `json:"tokenType"` +} + +// NewJWTService creates a new JWT service +func NewJWTService(secretKey string) *JWTService { + return &JWTService{ + secretKey: []byte(secretKey), + accessTokenTTL: 30 * 24 * time.Hour, // 30 days (matching NextAuth) + refreshTokenTTL: 90 * 24 * time.Hour, // 90 days + } +} + +// GenerateTokenPair generates both access and refresh tokens +func (s *JWTService) GenerateTokenPair(claims *Claims) (*TokenPair, error) { + // Set expiration for access token + now := time.Now() + claims.IssuedAt = jwt.NewNumericDate(now) + claims.ExpiresAt = jwt.NewNumericDate(now.Add(s.accessTokenTTL)) + claims.NotBefore = jwt.NewNumericDate(now) + + // Generate access token (JWT) + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + accessToken, err := token.SignedString(s.secretKey) + if err != nil { + return nil, err + } + + // Generate refresh token (random string) + refreshToken, err := s.generateRefreshToken() + if err != nil { + return nil, err + } + + return &TokenPair{ + AccessToken: accessToken, + RefreshToken: refreshToken, + ExpiresIn: int64(s.accessTokenTTL.Seconds()), + TokenType: "Bearer", + }, nil +} + +// ValidateAccessToken validates and parses an access token +func (s *JWTService) ValidateAccessToken(tokenString string) (*Claims, error) { + token, err := jwt.ParseWithClaims(tokenString, &Claims{}, func(token *jwt.Token) (interface{}, error) { + // Verify signing method + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, errors.New("invalid signing method") + } + return s.secretKey, nil + }) + + if err != nil { + return nil, err + } + + if !token.Valid { + return nil, errors.New("invalid token") + } + + claims, ok := token.Claims.(*Claims) + if !ok { + return nil, 
errors.New("invalid token claims") + } + + return claims, nil +} + +// generateRefreshToken generates a secure random refresh token +func (s *JWTService) generateRefreshToken() (string, error) { + bytes := make([]byte, 32) + if _, err := rand.Read(bytes); err != nil { + return "", err + } + return hex.EncodeToString(bytes), nil +} + +// GetAccessTokenTTL returns the access token TTL +func (s *JWTService) GetAccessTokenTTL() time.Duration { + return s.accessTokenTTL +} + +// GetRefreshTokenTTL returns the refresh token TTL +func (s *JWTService) GetRefreshTokenTTL() time.Duration { + return s.refreshTokenTTL +} diff --git a/internal/database/auth.go b/internal/database/auth.go index 5fda5cc..bf5f975 100644 --- a/internal/database/auth.go +++ b/internal/database/auth.go @@ -35,10 +35,10 @@ func (db *DB) QueryUserByEmail(ctx context.Context, email string) (*User, error) err := db.Pool.QueryRow(ctx, `SELECT - id, email, password, username, first_name, last_name, - roles, is_pterodactyl_admin, is_virtfusion_admin, is_system_admin, - pterodactyl_id, email_verified, is_active, avatar_url, - created_at, updated_at, last_login_at + id, email, password, username, "firstName", "lastName", + roles, "isPterodactylAdmin", "isVirtfusionAdmin", "isSystemAdmin", + "pterodactylId", "emailVerified", "isActive", "avatarUrl", + "createdAt", "updatedAt", "lastLoginAt" FROM users WHERE email = $1`, email, @@ -64,10 +64,10 @@ func (db *DB) QueryUserByID(ctx context.Context, id string) (*User, error) { err := db.Pool.QueryRow(ctx, `SELECT - id, email, password, username, first_name, last_name, - roles, is_pterodactyl_admin, is_virtfusion_admin, is_system_admin, - pterodactyl_id, email_verified, is_active, avatar_url, - created_at, updated_at, last_login_at + id, email, password, username, "firstName", "lastName", + roles, "isPterodactylAdmin", "isVirtfusionAdmin", "isSystemAdmin", + "pterodactylId", "emailVerified", "isActive", "avatarUrl", + "createdAt", "updatedAt", "lastLoginAt" FROM 
users WHERE id = $1`, id, @@ -101,11 +101,11 @@ func (db *DB) CreateUser(ctx context.Context, user *User, password string) (*Use err = db.Pool.QueryRow(ctx, `INSERT INTO users - (id, email, password, username, first_name, last_name, roles, - is_pterodactyl_admin, is_virtfusion_admin, is_system_admin, - is_active, created_at, updated_at) + (id, email, password, username, "firstName", "lastName", roles, + "isPterodactylAdmin", "isVirtfusionAdmin", "isSystemAdmin", + "isActive", "createdAt", "updatedAt") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) - RETURNING id, email, username, first_name, last_name, roles`, + RETURNING id, email, username, "firstName", "lastName", roles`, userID, user.Email, string(hashedPassword), user.Username, user.FirstName, user.LastName, user.Roles, user.IsPterodactylAdmin, user.IsVirtfusionAdmin, user.IsSystemAdmin, @@ -128,13 +128,23 @@ func (db *DB) CreateUser(ctx context.Context, user *User, password string) (*Use } // VerifyPassword checks if the provided password matches the user's hashed password +// Supports both $2a$ (Go bcrypt) and $2b$ (bcryptjs) hash formats func (u *User) VerifyPassword(password string) bool { if !u.Password.Valid { return false } + hash := u.Password.String + + // bcryptjs uses $2b$ prefix, but Go's bcrypt uses $2a$ + // They are compatible - we just need to normalize the prefix for Go's library + // Replace $2b$ with $2a$ for compatibility + if len(hash) > 4 && hash[:4] == "$2b$" { + hash = "$2a$" + hash[4:] + } + err := bcrypt.CompareHashAndPassword( - []byte(u.Password.String), + []byte(hash), []byte(password), ) @@ -149,11 +159,11 @@ func (db *DB) StoreVerificationToken(ctx context.Context, userID string, tokenTy expiresAt := time.Now().Add(expiration) _, err := db.Pool.Exec(ctx, - `INSERT INTO verification_tokens (user_id, token, type, expires_at, created_at) - VALUES ($1, $2, $3, $4, $5) - ON CONFLICT (user_id, type) DO UPDATE - SET token = $2, expires_at = $4, created_at = $5`, - 
userID, hashedToken, tokenType, expiresAt, time.Now(), + `INSERT INTO verification_tokens (identifier, token, type, expires) + VALUES ($1, $2, $3, $4) + ON CONFLICT (identifier, token) DO UPDATE + SET expires = $4`, + userID, hashedToken, tokenType, expiresAt, ) if err != nil { @@ -168,12 +178,12 @@ func (db *DB) VerifyEmailToken(ctx context.Context, userID, token string) (bool, hashedToken := hashToken(token) // Check token exists and is not expired - var id string + var tokenVal string err := db.Pool.QueryRow(ctx, - `SELECT id FROM verification_tokens - WHERE user_id = $1 AND token = $2 AND type = $3 AND expires_at > NOW()`, + `SELECT token FROM verification_tokens + WHERE identifier = $1 AND token = $2 AND type = $3 AND expires > NOW()`, userID, hashedToken, VerificationTokenType, - ).Scan(&id) + ).Scan(&tokenVal) if err != nil { return false, err @@ -182,8 +192,8 @@ func (db *DB) VerifyEmailToken(ctx context.Context, userID, token string) (bool, // Mark email as verified and delete token _, err = db.Pool.Exec(ctx, `BEGIN; - UPDATE users SET email_verified = NOW() WHERE id = $1; - DELETE FROM verification_tokens WHERE user_id = $1 AND type = $2; + UPDATE users SET "emailVerified" = NOW() WHERE id = $1; + DELETE FROM verification_tokens WHERE identifier = $1 AND type = $2; COMMIT;`, userID, VerificationTokenType, ) @@ -199,12 +209,12 @@ func (db *DB) VerifyEmailToken(ctx context.Context, userID, token string) (bool, func (db *DB) GetPasswordResetToken(ctx context.Context, userID, token string) (bool, error) { hashedToken := hashToken(token) - var id string + var tokenVal string err := db.Pool.QueryRow(ctx, - `SELECT id FROM verification_tokens - WHERE user_id = $1 AND token = $2 AND type = $3 AND expires_at > NOW()`, + `SELECT token FROM verification_tokens + WHERE identifier = $1 AND token = $2 AND type = $3 AND expires > NOW()`, userID, hashedToken, PasswordResetTokenType, - ).Scan(&id) + ).Scan(&tokenVal) if err != nil { return false, err @@ -230,8 +240,8 @@ 
func (db *DB) ResetUserPassword(ctx context.Context, userID, token, newPassword // Update password and delete token in transaction _, err = db.Pool.Exec(ctx, `BEGIN; - UPDATE users SET password = $1, updated_at = NOW() WHERE id = $2; - DELETE FROM verification_tokens WHERE user_id = $2 AND type = $3; + UPDATE users SET password = $1, "updatedAt" = NOW() WHERE id = $2; + DELETE FROM verification_tokens WHERE identifier = $2 AND type = $3; COMMIT;`, string(hashedPassword), userID, PasswordResetTokenType, ) @@ -249,11 +259,11 @@ func (db *DB) GetMagicLinkToken(ctx context.Context, token string) (*Verificatio vt := &VerificationToken{} err := db.Pool.QueryRow(ctx, - `SELECT user_id, token, type, expires_at, created_at + `SELECT identifier, token, type, expires FROM verification_tokens - WHERE token = $1 AND type = $2 AND expires_at > NOW()`, + WHERE token = $1 AND type = $2 AND expires > NOW()`, hashedToken, MagicLinkTokenType, - ).Scan(&vt.UserID, &vt.Token, &vt.Type, &vt.ExpiresAt, &vt.CreatedAt) + ).Scan(&vt.UserID, &vt.Token, &vt.Type, &vt.ExpiresAt) if err != nil { return nil, err @@ -269,8 +279,8 @@ func (db *DB) ConsumeMagicLinkToken(ctx context.Context, token string) (string, var userID string err := db.Pool.QueryRow(ctx, `DELETE FROM verification_tokens - WHERE token = $1 AND type = $2 AND expires_at > NOW() - RETURNING user_id`, + WHERE token = $1 AND type = $2 AND expires > NOW() + RETURNING identifier`, hashedToken, MagicLinkTokenType, ).Scan(&userID) @@ -284,7 +294,7 @@ func (db *DB) ConsumeMagicLinkToken(ctx context.Context, token string) (string, // UpdateLastLogin updates the user's last login timestamp func (db *DB) UpdateLastLogin(ctx context.Context, userID string) error { _, err := db.Pool.Exec(ctx, - `UPDATE users SET last_login_at = NOW() WHERE id = $1`, + `UPDATE users SET "lastLoginAt" = NOW() WHERE id = $1`, userID, ) return err diff --git a/internal/database/sessions.go b/internal/database/sessions.go new file mode 100644 index 
0000000..c075ad7 --- /dev/null +++ b/internal/database/sessions.go @@ -0,0 +1,114 @@ +package database + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +// Session represents a user session +type Session struct { + ID string + SessionToken string + UserID string + Expires time.Time + CreatedAt time.Time +} + +// CreateSession creates a new session in the database +func (db *DB) CreateSession(ctx context.Context, userID string, sessionToken string, expiresAt time.Time) (*Session, error) { + session := &Session{ + ID: uuid.New().String(), + SessionToken: sessionToken, + UserID: userID, + Expires: expiresAt, + CreatedAt: time.Now(), + } + + query := ` + INSERT INTO sessions (id, "sessionToken", "userId", expires, "createdAt") + VALUES ($1, $2, $3, $4, $5) + RETURNING id, "sessionToken", "userId", expires, "createdAt" + ` + + err := db.Pool.QueryRow(ctx, query, + session.ID, + session.SessionToken, + session.UserID, + session.Expires, + session.CreatedAt, + ).Scan( + &session.ID, + &session.SessionToken, + &session.UserID, + &session.Expires, + &session.CreatedAt, + ) + + if err != nil { + return nil, err + } + + return session, nil +} + +// GetSessionByToken retrieves a session by its token +func (db *DB) GetSessionByToken(ctx context.Context, sessionToken string) (*Session, error) { + session := &Session{} + + query := ` + SELECT id, "sessionToken", "userId", expires, "createdAt" + FROM sessions + WHERE "sessionToken" = $1 AND expires > NOW() + ` + + err := db.Pool.QueryRow(ctx, query, sessionToken).Scan( + &session.ID, + &session.SessionToken, + &session.UserID, + &session.Expires, + &session.CreatedAt, + ) + + if err != nil { + return nil, err + } + + return session, nil +} + +// DeleteSession deletes a session from the database +func (db *DB) DeleteSession(ctx context.Context, sessionToken string) error { + query := `DELETE FROM sessions WHERE "sessionToken" = $1` + _, err := db.Pool.Exec(ctx, query, sessionToken) + return err +} + +// 
DeleteUserSessions deletes all sessions for a user +func (db *DB) DeleteUserSessions(ctx context.Context, userID string) error { + query := `DELETE FROM sessions WHERE "userId" = $1` + _, err := db.Pool.Exec(ctx, query, userID) + return err +} + +// DeleteExpiredSessions deletes all expired sessions +func (db *DB) DeleteExpiredSessions(ctx context.Context) (int64, error) { + query := `DELETE FROM sessions WHERE expires < NOW()` + result, err := db.Pool.Exec(ctx, query) + if err != nil { + return 0, err + } + return result.RowsAffected(), nil +} + +// UpdateSessionExpiry updates the expiry time of a session +func (db *DB) UpdateSessionExpiry(ctx context.Context, sessionToken string, newExpiry time.Time) error { + query := ` + UPDATE sessions + SET expires = $2 + WHERE "sessionToken" = $1 + ` + _, err := db.Pool.Exec(ctx, query, sessionToken, newExpiry) + return err +} diff --git a/internal/handlers/admin_servers.go b/internal/handlers/admin_servers.go new file mode 100644 index 0000000..603963c --- /dev/null +++ b/internal/handlers/admin_servers.go @@ -0,0 +1,270 @@ +package handlers + +import ( + "context" + "fmt" + "strings" + + "github.com/gofiber/fiber/v2" + "github.com/nodebyte/backend/internal/database" +) + +// AdminServerHandler handles admin server operations +type AdminServerHandler struct { + db *database.DB +} + +// NewAdminServerHandler creates a new admin server handler +func NewAdminServerHandler(db *database.DB) *AdminServerHandler { + return &AdminServerHandler{db: db} +} + +// AdminServerResponse represents a server for admin view +type AdminServerResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Status string `json:"status"` + IsSuspended bool `json:"isSuspended"` + Owner *OwnerInfo `json:"owner"` + Node *NodeInfo `json:"node"` + Egg *EggInfo `json:"egg"` + Memory int64 `json:"memory"` + Disk int64 `json:"disk"` + CPU int `json:"cpu"` + CreatedAt string `json:"createdAt"` + UpdatedAt
string `json:"updatedAt"` +} + +// OwnerInfo represents server owner information +type OwnerInfo struct { + ID string `json:"id"` + Email string `json:"email"` + Username string `json:"username"` +} + +// NodeInfo represents node information +type NodeInfo struct { + ID int `json:"id"` + Name string `json:"name"` + FQDN string `json:"fqdn"` +} + +// EggInfo represents egg information +type EggInfo struct { + ID int `json:"id"` + Name string `json:"name"` + Nest string `json:"nest"` +} + +// GetServersRequest represents pagination and filter parameters +type GetServersRequest struct { + Search string `query:"search"` + Status string `query:"status"` // all, online, offline, suspended, installing + Sort string `query:"sort"` // name, created, status + Order string `query:"order"` // asc, desc + Page int `query:"page"` + PageSize int `query:"pageSize"` +} + +// GetServers returns paginated list of all servers with filtering +func (h *AdminServerHandler) GetServers(c *fiber.Ctx) error { + // Parse query parameters + req := GetServersRequest{ + Search: c.Query("search", ""), + Status: c.Query("status", "all"), + Sort: c.Query("sort", "created"), + Order: c.Query("order", "desc"), + Page: c.QueryInt("page", 1), + PageSize: c.QueryInt("pageSize", 25), + } + + if req.Page < 1 { + req.Page = 1 + } + if req.PageSize < 1 || req.PageSize > 100 { + req.PageSize = 25 + } + + // Build query + query := ` + SELECT + s.id, s.name, s.description, s.status, s.is_suspended, + u.id, u.email, u.username, + n.id, n.name, n.fqdn, + e.id, e.name, nest.name, + s.memory, s.disk, s.cpu, s.created_at, s.updated_at + FROM servers s + LEFT JOIN users u ON s.owner_id = u.id + LEFT JOIN nodes n ON s.node_id = n.id + LEFT JOIN eggs e ON s.egg_id = e.id + LEFT JOIN nests nest ON e.nest_id = nest.id + WHERE 1=1 + ` + + args := []interface{}{} + + // Apply search filter + if req.Search != "" { + args = append(args, "%"+req.Search+"%") + query += fmt.Sprintf(` AND (s.name ILIKE $%d OR s.description 
ILIKE $%d)`, len(args), len(args)) + } + + // Apply status filter + switch req.Status { + case "online": + args = append(args, "running") + query += fmt.Sprintf(` AND s.status = $%d AND s.is_suspended = false`, len(args)) + case "offline": + args = append(args, "stopped") + query += fmt.Sprintf(` AND s.status = $%d AND s.is_suspended = false`, len(args)) + case "suspended": + query += ` AND s.is_suspended = true` + case "installing": + args = append(args, "installing") + query += fmt.Sprintf(` AND s.status = $%d`, len(args)) + // default: "all" - no additional filter + } + + // Apply sorting + sortField := "s.created_at" + if req.Sort == "name" { + sortField = "s.name" + } else if req.Sort == "status" { + sortField = "s.status" + } + sortOrder := "DESC" + if strings.ToLower(req.Order) == "asc" { + sortOrder = "ASC" + } + query += fmt.Sprintf(` ORDER BY %s %s`, sortField, sortOrder) + + // Get total count for pagination + countQuery := ` + SELECT COUNT(*) + FROM servers s + WHERE 1=1 + ` + + // Apply same filters to count query + if req.Search != "" { + countQuery += ` AND (s.name ILIKE $1 OR s.description ILIKE $1)` + } + + switch req.Status { + case "online": + countQuery += ` AND s.status = 'running' AND s.is_suspended = false` + case "offline": + countQuery += ` AND s.status = 'stopped' AND s.is_suspended = false` + case "suspended": + countQuery += ` AND s.is_suspended = true` + case "installing": + countQuery += ` AND s.status = 'installing'` + } + + var totalCount int + countArgs := []interface{}{} + if req.Search != "" { + countArgs = append(countArgs, "%"+req.Search+"%") + } + + err := h.db.Pool.QueryRow(context.Background(), countQuery, countArgs...).Scan(&totalCount) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{ + "error": "Failed to count servers", + }) + } + + // Apply pagination + offset := (req.Page - 1) * req.PageSize + args = append(args, req.PageSize, offset) + query += fmt.Sprintf(` LIMIT $%d OFFSET $%d`, 
len(args)-1, len(args)) + + // Execute query + rows, err := h.db.Pool.Query(context.Background(), query, args...) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{ + "error": "Failed to fetch servers", + }) + } + defer rows.Close() + + servers := []AdminServerResponse{} + for rows.Next() { + var server AdminServerResponse + var ownerID, ownerEmail, ownerUsername interface{} + var nodeID interface{} + var nodeName, nodeFQDN interface{} + var eggID interface{} + var eggName, nestName interface{} + + err := rows.Scan( + &server.ID, + &server.Name, + &server.Description, + &server.Status, + &server.IsSuspended, + &ownerID, + &ownerEmail, + &ownerUsername, + &nodeID, + &nodeName, + &nodeFQDN, + &eggID, + &eggName, + &nestName, + &server.Memory, + &server.Disk, + &server.CPU, + &server.CreatedAt, + &server.UpdatedAt, + ) + if err != nil { + continue + } + + // Map owner info + if ownerID != nil { + server.Owner = &OwnerInfo{ + ID: ownerID.(string), + Email: ownerEmail.(string), + Username: ownerUsername.(string), + } + } + + // Map node info + if nodeID != nil { + server.Node = &NodeInfo{ + ID: nodeID.(int), + Name: nodeName.(string), + FQDN: nodeFQDN.(string), + } + } + + // Map egg info + if eggID != nil { + server.Egg = &EggInfo{ + ID: eggID.(int), + Name: eggName.(string), + Nest: nestName.(string), + } + } + + servers = append(servers, server) + } + + // Calculate pagination info + totalPages := (totalCount + req.PageSize - 1) / req.PageSize + + return c.JSON(fiber.Map{ + "data": fiber.Map{ + "servers": servers, + "pagination": fiber.Map{ + "page": req.Page, + "pageSize": req.PageSize, + "total": totalCount, + "totalPages": totalPages, + }, + }, + }) +} diff --git a/internal/handlers/admin_users.go b/internal/handlers/admin_users.go new file mode 100644 index 0000000..8123c7c --- /dev/null +++ b/internal/handlers/admin_users.go @@ -0,0 +1,282 @@ +package handlers + +import ( + "context" + "fmt" + "strings" + "time" + + 
"github.com/gofiber/fiber/v2" + "github.com/lib/pq" + "github.com/nodebyte/backend/internal/database" +) + +// AdminUserHandler handles admin user operations +type AdminUserHandler struct { + db *database.DB +} + +// NewAdminUserHandler creates a new admin user handler +func NewAdminUserHandler(db *database.DB) *AdminUserHandler { + return &AdminUserHandler{db: db} +} + +// AdminUserResponse represents a user for admin view +type AdminUserResponse struct { + ID string `json:"id"` + Email string `json:"email"` + Username string `json:"username"` + FirstName string `json:"firstName"` + LastName string `json:"lastName"` + Roles []string `json:"roles"` + IsPterodactylAdmin bool `json:"isPterodactylAdmin"` + IsVirtfusionAdmin bool `json:"isVirtfusionAdmin"` + IsSystemAdmin bool `json:"isSystemAdmin"` + IsMigrated bool `json:"isMigrated"` + IsActive bool `json:"isActive"` + EmailVerified bool `json:"emailVerified"` + CreatedAt string `json:"createdAt"` + UpdatedAt string `json:"updatedAt"` + LastLoginAt string `json:"lastLoginAt,omitempty"` + ServerCount int `json:"serverCount"` + SessionCount int `json:"sessionCount"` +} + +// GetUsersRequest represents pagination and filter parameters +type GetUsersRequest struct { + Search string `query:"search"` + Filter string `query:"filter"` // all, migrated, active, admin, inactive + Sort string `query:"sort"` // email, created + Order string `query:"order"` // asc, desc + Page int `query:"page"` + PageSize int `query:"pageSize"` +} + +// GetUsers returns paginated list of all users with filtering +func (h *AdminUserHandler) GetUsers(c *fiber.Ctx) error { + // Parse query parameters + req := GetUsersRequest{ + Search: c.Query("search", ""), + Filter: c.Query("filter", "all"), + Sort: c.Query("sort", "created"), + Order: c.Query("order", "desc"), + Page: c.QueryInt("page", 1), + PageSize: c.QueryInt("pageSize", 25), + } + + if req.Page < 1 { + req.Page = 1 + } + if req.PageSize < 1 || req.PageSize > 100 { + req.PageSize = 25 + } + 
+ // Build base query with WHERE clause first + baseQuery := `WHERE 1=1` + + // Apply search filter + if req.Search != "" { + baseQuery += ` AND (u.email ILIKE $1 OR u.username ILIKE $1)` + } + + // Apply status filter + switch req.Filter { + case "migrated": + baseQuery += ` AND u."isMigrated" = true` + case "active": + baseQuery += ` AND u."isActive" = true` + case "admin": + baseQuery += ` AND (u."isSystemAdmin" = true OR u."isPterodactylAdmin" = true OR u."isVirtfusionAdmin" = true)` + case "inactive": + baseQuery += ` AND u."isActive" = false` + // default: "all" - no additional filter + } + + // Build main query using subqueries for counts + query := ` + SELECT + u.id, u.email, u.username, + u.roles, u."isPterodactylAdmin", u."isVirtfusionAdmin", + u."isSystemAdmin", u."isMigrated", u."isActive", u."emailVerified", + u."createdAt", u."updatedAt", u."lastLoginAt", + (SELECT COUNT(*) FROM servers WHERE "ownerId" = u.id) as server_count, + (SELECT COUNT(*) FROM sessions WHERE "userId" = u.id) as session_count + FROM users u + ` + baseQuery + + // Apply sorting + sortField := "u.\"createdAt\"" + if req.Sort == "email" { + sortField = "u.email" + } + sortOrder := "DESC" + if strings.ToLower(req.Order) == "asc" { + sortOrder = "ASC" + } + query += fmt.Sprintf(` ORDER BY %s %s`, sortField, sortOrder) + + // Build count query + countQuery := `SELECT COUNT(*) FROM users u ` + baseQuery + + // Get total count for pagination + var totalCount int + args := []interface{}{} + if req.Search != "" { + args = append(args, "%"+req.Search+"%") + } + + err := h.db.Pool.QueryRow(context.Background(), countQuery, args...).Scan(&totalCount) + if err != nil { + fmt.Println("DEBUG: Count query error:", err.Error()) + fmt.Println("DEBUG: Count query:", countQuery) + fmt.Println("DEBUG: Args:", args) + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{ + "error": "Failed to count users: " + err.Error(), + }) + } + + // Apply pagination + offset := (req.Page - 1) * 
req.PageSize + query += fmt.Sprintf(` LIMIT %d OFFSET %d`, req.PageSize, offset) + + // Execute query + rows, err := h.db.Pool.Query(context.Background(), query, args...) + if err != nil { + fmt.Println("DEBUG: Query error:", err.Error()) + fmt.Println("DEBUG: Query:", query) + fmt.Println("DEBUG: Args:", args) + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{ + "error": "Failed to fetch users: " + err.Error(), + }) + } + defer rows.Close() + + users := []AdminUserResponse{} + for rows.Next() { + var user AdminUserResponse + var rolesArray pq.StringArray + var lastLoginAt *time.Time + var emailVerifiedTime *time.Time + var createdAt time.Time + var updatedAt time.Time + + err := rows.Scan( + &user.ID, + &user.Email, + &user.Username, + &rolesArray, + &user.IsPterodactylAdmin, + &user.IsVirtfusionAdmin, + &user.IsSystemAdmin, + &user.IsMigrated, + &user.IsActive, + &emailVerifiedTime, + &createdAt, + &updatedAt, + &lastLoginAt, + &user.ServerCount, + &user.SessionCount, + ) + if err != nil { + fmt.Printf("DEBUG: Scan error: %v\n", err) + continue + } + + // Parse roles array + user.Roles = []string(rolesArray) + if user.Roles == nil { + user.Roles = []string{} + } + + // Convert timestamps to ISO 8601 string format + user.CreatedAt = createdAt.Format(time.RFC3339) + user.UpdatedAt = updatedAt.Format(time.RFC3339) + + // Handle nullable lastLoginAt + if lastLoginAt != nil { + user.LastLoginAt = lastLoginAt.Format(time.RFC3339) + } + + // Handle nullable emailVerified (TIMESTAMP) - if not null, user has verified email + if emailVerifiedTime != nil { + user.EmailVerified = true + } + + users = append(users, user) + } + + // Calculate pagination info + totalPages := (totalCount + req.PageSize - 1) / req.PageSize + + return c.JSON(fiber.Map{ + "data": fiber.Map{ + "users": users, + "pagination": fiber.Map{ + "page": req.Page, + "pageSize": req.PageSize, + "total": totalCount, + "totalPages": totalPages, + }, + }, + }) +} + +// UpdateUserRolesRequest 
represents a request to update user roles +type UpdateUserRolesRequest struct { + UserID string `json:"userId"` + Roles []string `json:"roles"` +} + +// UpdateUserRoles updates the roles for a user +func (h *AdminUserHandler) UpdateUserRoles(c *fiber.Ctx) error { + var req UpdateUserRolesRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{ + "error": "Invalid request body", + }) + } + + if req.UserID == "" { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{ + "error": "userId is required", + }) + } + + // Validate roles + validRoles := map[string]bool{ + "admin": true, + "moderator": true, + "supporter": true, + "pterodactyl_admin": true, + "virtfusion_admin": true, + "system_admin": true, + } + + for _, role := range req.Roles { + if !validRoles[role] { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{ + "error": fmt.Sprintf("Invalid role: %s", role), + }) + } + } + + // Update user roles in database + // roles is text[]; pgx encodes a []string directly as a Postgres array + err := h.db.Pool.QueryRow(context.Background(), + `UPDATE users SET roles = $1, "updatedAt" = NOW() WHERE id = $2 RETURNING id, roles`, + req.Roles, req.UserID, + ).Scan(nil, nil) + + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{ + "error": "Failed to update user roles", + }) + } + + return c.JSON(fiber.Map{ + "data": fiber.Map{ + "userId": req.UserID, + "roles": req.Roles, + }, + }) +} diff --git a/internal/handlers/auth.go b/internal/handlers/auth.go index 5ac5621..07d989f 100644 --- a/internal/handlers/auth.go +++ b/internal/handlers/auth.go @@ -4,11 +4,13 @@ import ( "database/sql" "errors" "regexp" + "time" "unicode" "github.com/gofiber/fiber/v2" "github.com/rs/zerolog/log" + "github.com/nodebyte/backend/internal/auth" "github.com/nodebyte/backend/internal/database" "github.com/nodebyte/backend/internal/queue" ) @@ -17,13 +19,15 @@ import ( type AuthHandler struct { db *database.DB queueManager *queue.Manager + 
jwtService *auth.JWTService } // NewAuthHandler creates a new auth handler -func NewAuthHandler(db *database.DB, queueManager *queue.Manager) *AuthHandler { +func NewAuthHandler(db *database.DB, queueManager *queue.Manager, jwtService *auth.JWTService) *AuthHandler { return &AuthHandler{ db: db, queueManager: queueManager, + jwtService: jwtService, } } @@ -35,11 +39,15 @@ type CredentialsRequest struct { // AuthResponse represents an authentication response type AuthResponse struct { - Success bool `json:"success"` - Message string `json:"message,omitempty"` - Error string `json:"error,omitempty"` - User *UserData `json:"user,omitempty"` - Token string `json:"token,omitempty"` + Success bool `json:"success"` + Message string `json:"message,omitempty"` + Error string `json:"error,omitempty"` + User *UserData `json:"user,omitempty"` + Token string `json:"token,omitempty"` // Deprecated: use tokens instead + Tokens *auth.TokenPair `json:"tokens,omitempty"` + AccessToken string `json:"accessToken,omitempty"` // For backward compatibility + RefreshToken string `json:"refreshToken,omitempty"` // For backward compatibility + ExpiresIn int64 `json:"expiresIn,omitempty"` // For backward compatibility } // UserData represents user information returned during auth @@ -103,7 +111,17 @@ func validateEmail(email string) error { } // AuthenticateUser handles user login with credentials -// POST /api/v1/auth/login +// @Summary User Login +// @Description Authenticates a user with email and password, returns JWT tokens +// @Tags Authentication +// @Accept json +// @Produce json +// @Param credentials body CredentialsRequest true "Login credentials" +// @Success 200 {object} AuthResponse "Login successful with JWT tokens" +// @Failure 400 {object} AuthResponse "Invalid request" +// @Failure 401 {object} AuthResponse "Invalid credentials or email not verified" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/login [post] func (h *AuthHandler) 
AuthenticateUser(c *fiber.Ctx) error { var req CredentialsRequest if err := c.BodyParser(&req); err != nil { @@ -153,7 +171,42 @@ func (h *AuthHandler) AuthenticateUser(c *fiber.Ctx) error { }) } - // Return user data + // Generate JWT tokens + claims := &auth.Claims{ + UserID: user.ID, + Email: user.Email, + Username: user.Username.String, + FirstName: getStringPointer(user.FirstName), + LastName: getStringPointer(user.LastName), + Roles: user.Roles, + IsPterodactylAdmin: user.IsPterodactylAdmin, + IsVirtfusionAdmin: user.IsVirtfusionAdmin, + IsSystemAdmin: user.IsSystemAdmin, + PterodactylID: getInt64Pointer(user.PterodactylID), + EmailVerified: formatNullTime(user.EmailVerified), + } + + tokenPair, err := h.jwtService.GenerateTokenPair(claims) + if err != nil { + log.Error().Err(err).Str("userID", user.ID).Msg("Failed to generate tokens") + return c.Status(fiber.StatusInternalServerError).JSON(AuthResponse{ + Success: false, + Error: "token_generation_failed", + }) + } + + // Store refresh token in session + expiresAt := time.Now().Add(h.jwtService.GetRefreshTokenTTL()) + _, err = h.db.CreateSession(c.Context(), user.ID, tokenPair.RefreshToken, expiresAt) + if err != nil { + log.Error().Err(err).Str("userID", user.ID).Msg("Failed to create session") + return c.Status(fiber.StatusInternalServerError).JSON(AuthResponse{ + Success: false, + Error: "session_creation_failed", + }) + } + + // Return user data with tokens userData := &UserData{ ID: user.ID, Email: user.Email, @@ -165,13 +218,17 @@ func (h *AuthHandler) AuthenticateUser(c *fiber.Ctx) error { IsVirtfusionAdmin: user.IsVirtfusionAdmin, IsSystemAdmin: user.IsSystemAdmin, PterodactylID: getInt64Pointer(user.PterodactylID), - EmailVerified: formatTime(nil), + EmailVerified: formatNullTime(user.EmailVerified), } return c.Status(fiber.StatusOK).JSON(AuthResponse{ - Success: true, - Message: "Login successful", - User: userData, + Success: true, + Message: "Login successful", + User: userData, + Tokens: 
tokenPair, + AccessToken: tokenPair.AccessToken, + RefreshToken: tokenPair.RefreshToken, + ExpiresIn: tokenPair.ExpiresIn, }) } @@ -186,7 +243,17 @@ type RegisterUserRequest struct { } // RegisterUser handles user registration -// POST /api/v1/auth/register +// @Summary User Registration +// @Description Registers a new user account and sends verification email +// @Tags Authentication +// @Accept json +// @Produce json +// @Param registration body RegisterUserRequest true "Registration details" +// @Success 201 {object} AuthResponse "User registered successfully" +// @Failure 400 {object} AuthResponse "Invalid request or validation error" +// @Failure 409 {object} AuthResponse "Email already exists" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/register [post] func (h *AuthHandler) RegisterUser(c *fiber.Ctx) error { var req RegisterUserRequest if err := c.BodyParser(&req); err != nil { @@ -306,7 +373,17 @@ type VerifyEmailRequest struct { } // VerifyEmail handles email verification -// POST /api/v1/auth/verify-email +// @Summary Verify Email +// @Description Verifies user email address with verification token +// @Tags Authentication +// @Accept json +// @Produce json +// @Param verification body map[string]string true "Verification token" example({"token":"abc123"}) +// @Success 200 {object} AuthResponse "Email verified successfully" +// @Failure 400 {object} AuthResponse "Invalid or missing token" +// @Failure 404 {object} AuthResponse "User not found" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/verify-email [post] func (h *AuthHandler) VerifyEmail(c *fiber.Ctx) error { var req VerifyEmailRequest if err := c.BodyParser(&req); err != nil { @@ -344,7 +421,17 @@ type ForgotPasswordRequest struct { } // ForgotPassword handles forgot password requests -// POST /api/v1/auth/forgot-password +// @Summary Forgot Password +// @Description Sends password reset email with reset token +// 
@Tags Authentication +// @Accept json +// @Produce json +// @Param email body ForgotPasswordRequest true "User email" +// @Success 200 {object} AuthResponse "Reset email sent" +// @Failure 400 {object} AuthResponse "Invalid email" +// @Failure 404 {object} AuthResponse "User not found" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/forgot-password [post] func (h *AuthHandler) ForgotPassword(c *fiber.Ctx) error { var req ForgotPasswordRequest if err := c.BodyParser(&req); err != nil { @@ -413,7 +500,17 @@ type ResetPasswordRequest struct { } // ResetPassword handles password reset -// POST /api/v1/auth/reset-password +// @Summary Reset Password +// @Description Resets user password using reset token from email +// @Tags Authentication +// @Accept json +// @Produce json +// @Param reset body ResetPasswordRequest true "Reset token and new password" +// @Success 200 {object} AuthResponse "Password reset successfully" +// @Failure 400 {object} AuthResponse "Invalid request or weak password" +// @Failure 404 {object} AuthResponse "User not found or invalid token" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/reset-password [post] func (h *AuthHandler) ResetPassword(c *fiber.Ctx) error { var req ResetPasswordRequest if err := c.BodyParser(&req); err != nil { @@ -460,7 +557,15 @@ func (h *AuthHandler) ResetPassword(c *fiber.Ctx) error { } // GetUserByID retrieves user information by ID -// GET /api/v1/auth/users/:id +// @Summary Get User By ID +// @Description Retrieves user information by user ID +// @Tags Authentication +// @Accept json +// @Produce json +// @Param id path string true "User ID" +// @Success 200 {object} AuthResponse "User information" +// @Failure 404 {object} AuthResponse "User not found" +// @Router /api/v1/auth/users/{id} [get] func (h *AuthHandler) GetUserByID(c *fiber.Ctx) error { userID := c.Params("id") @@ -528,18 +633,21 @@ func getPointerValue(p *string) string { 
return *p } -// Helper function to format timestamps -func formatTime(t *string) *string { - return t -} - // CheckEmailExistsRequest represents a check email request type CheckEmailRequest struct { Email string `json:"email"` } // CheckEmailExists checks if an email is already registered -// GET /api/v1/auth/check-email +// @Summary Check Email Exists +// @Description Checks if an email address is already registered in the system +// @Tags Authentication +// @Accept json +// @Produce json +// @Param email query string true "Email address to check" +// @Success 200 {object} map[string]interface{} "Email availability status" +// @Failure 400 {object} map[string]interface{} "Invalid email" +// @Router /api/v1/auth/check-email [get] func (h *AuthHandler) CheckEmailExists(c *fiber.Ctx) error { email := c.Query("email") @@ -573,7 +681,16 @@ type CredentialsValidateRequest struct { // ValidateCredentials validates credentials and returns user data for NextAuth // This is specifically designed for NextAuth custom provider integration -// POST /api/v1/auth/validate +// @Summary Validate Credentials +// @Description Validates user credentials without creating a session +// @Tags Authentication +// @Accept json +// @Produce json +// @Param credentials body CredentialsValidateRequest true "Credentials to validate" +// @Success 200 {object} map[string]interface{} "Credentials are valid" +// @Failure 400 {object} map[string]interface{} "Invalid request" +// @Failure 401 {object} map[string]interface{} "Invalid credentials" +// @Router /api/v1/auth/validate [post] func (h *AuthHandler) ValidateCredentials(c *fiber.Ctx) error { var req CredentialsValidateRequest if err := c.BodyParser(&req); err != nil { @@ -651,7 +768,17 @@ type MagicLinkRequest struct { } // RequestMagicLink sends a magic link to the user's email -// POST /api/v1/auth/magic-link +// @Summary Request Magic Link +// @Description Sends a passwordless authentication magic link to user's email +// @Tags 
Authentication +// @Accept json +// @Produce json +// @Param magicLink body MagicLinkRequest true "User email" +// @Success 200 {object} AuthResponse "Magic link sent" +// @Failure 400 {object} AuthResponse "Invalid email" +// @Failure 404 {object} AuthResponse "User not found" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/magic-link [post] func (h *AuthHandler) RequestMagicLink(c *fiber.Ctx) error { var req MagicLinkRequest if err := c.BodyParser(&req); err != nil { @@ -718,7 +845,17 @@ type MagicLinkVerifyRequest struct { } // VerifyMagicLink verifies a magic link token -// POST /api/v1/auth/magic-link/verify +// @Summary Verify Magic Link +// @Description Verifies magic link token and authenticates user +// @Tags Authentication +// @Accept json +// @Produce json +// @Param verify body MagicLinkVerifyRequest true "Magic link token" +// @Success 200 {object} AuthResponse "Authentication successful with JWT tokens" +// @Failure 400 {object} AuthResponse "Invalid request" +// @Failure 401 {object} AuthResponse "Invalid or expired token" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/magic-link/verify [post] func (h *AuthHandler) VerifyMagicLink(c *fiber.Ctx) error { var req MagicLinkVerifyRequest if err := c.BodyParser(&req); err != nil { diff --git a/internal/handlers/auth_tokens.go b/internal/handlers/auth_tokens.go new file mode 100644 index 0000000..021b4a7 --- /dev/null +++ b/internal/handlers/auth_tokens.go @@ -0,0 +1,230 @@ +package handlers + +import ( + "strings" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/rs/zerolog/log" + + "github.com/nodebyte/backend/internal/auth" +) + +// RefreshTokenRequest represents a token refresh request +type RefreshTokenRequest struct { + RefreshToken string `json:"refreshToken"` +} + +// RefreshToken handles token refresh +// @Summary Refresh Access Token +// @Description Exchanges a valid refresh token for new access and refresh 
tokens +// @Tags Authentication +// @Accept json +// @Produce json +// @Param refresh body RefreshTokenRequest true "Refresh token" +// @Success 200 {object} AuthResponse "New tokens generated" +// @Failure 400 {object} AuthResponse "Missing refresh token" +// @Failure 401 {object} AuthResponse "Invalid or expired refresh token" +// @Failure 500 {object} AuthResponse "Internal server error" +// @Router /api/v1/auth/refresh [post] +func (h *AuthHandler) RefreshToken(c *fiber.Ctx) error { + var req RefreshTokenRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(AuthResponse{ + Success: false, + Error: "invalid_request", + }) + } + + if req.RefreshToken == "" { + return c.Status(fiber.StatusBadRequest).JSON(AuthResponse{ + Success: false, + Error: "missing_refresh_token", + }) + } + + // Validate refresh token from database + session, err := h.db.GetSessionByToken(c.Context(), req.RefreshToken) + if err != nil { + return c.Status(fiber.StatusUnauthorized).JSON(AuthResponse{ + Success: false, + Error: "invalid_refresh_token", + }) + } + + // Get user data + user, err := h.db.QueryUserByID(c.Context(), session.UserID) + if err != nil || user == nil { + return c.Status(fiber.StatusUnauthorized).JSON(AuthResponse{ + Success: false, + Error: "user_not_found", + }) + } + + // Check if user is active + if !user.IsActive { + return c.Status(fiber.StatusUnauthorized).JSON(AuthResponse{ + Success: false, + Error: "account_disabled", + }) + } + + // Generate new token pair + claims := &auth.Claims{ + UserID: user.ID, + Email: user.Email, + Username: user.Username.String, + FirstName: getStringPointer(user.FirstName), + LastName: getStringPointer(user.LastName), + Roles: user.Roles, + IsPterodactylAdmin: user.IsPterodactylAdmin, + IsVirtfusionAdmin: user.IsVirtfusionAdmin, + IsSystemAdmin: user.IsSystemAdmin, + PterodactylID: getInt64Pointer(user.PterodactylID), + EmailVerified: formatNullTime(user.EmailVerified), + } + + tokenPair, 
err := h.jwtService.GenerateTokenPair(claims) + if err != nil { + log.Error().Err(err).Str("userID", user.ID).Msg("Failed to generate tokens") + return c.Status(fiber.StatusInternalServerError).JSON(AuthResponse{ + Success: false, + Error: "token_generation_failed", + }) + } + + // Delete old refresh token + _ = h.db.DeleteSession(c.Context(), req.RefreshToken) + + // Store new refresh token in session + expiresAt := time.Now().Add(h.jwtService.GetRefreshTokenTTL()) + _, err = h.db.CreateSession(c.Context(), user.ID, tokenPair.RefreshToken, expiresAt) + if err != nil { + log.Error().Err(err).Str("userID", user.ID).Msg("Failed to create session") + return c.Status(fiber.StatusInternalServerError).JSON(AuthResponse{ + Success: false, + Error: "session_creation_failed", + }) + } + + log.Info().Str("userID", user.ID).Msg("Token refreshed") + + return c.Status(fiber.StatusOK).JSON(AuthResponse{ + Success: true, + Message: "Token refreshed", + Tokens: tokenPair, + AccessToken: tokenPair.AccessToken, + RefreshToken: tokenPair.RefreshToken, + ExpiresIn: tokenPair.ExpiresIn, + }) +} + +// LogoutRequest represents a logout request +type LogoutRequest struct { + RefreshToken string `json:"refreshToken,omitempty"` +} + +// Logout handles user logout +// @Summary User Logout +// @Description Invalidates refresh token and terminates user session +// @Tags Authentication +// @Accept json +// @Produce json +// @Param logout body LogoutRequest false "Optional refresh token to invalidate" +// @Param Authorization header string false "Bearer token" example(Bearer eyJhbGc...) 
+// @Success 200 {object} AuthResponse "Logged out successfully" +// @Router /api/v1/auth/logout [post] +func (h *AuthHandler) Logout(c *fiber.Ctx) error { + var req LogoutRequest + _ = c.BodyParser(&req) + + // If refresh token provided, delete that specific session + if req.RefreshToken != "" { + err := h.db.DeleteSession(c.Context(), req.RefreshToken) + if err != nil { + log.Error().Err(err).Msg("Failed to delete session") + } + } + + // Also try to get user ID from JWT and delete all sessions + authHeader := c.Get("Authorization") + if authHeader != "" { + token := strings.TrimPrefix(authHeader, "Bearer ") + claims, err := h.jwtService.ValidateAccessToken(token) + if err == nil && claims != nil { + // Delete all user sessions + _ = h.db.DeleteUserSessions(c.Context(), claims.UserID) + log.Info().Str("userID", claims.UserID).Msg("User logged out") + } + } + + return c.Status(fiber.StatusOK).JSON(AuthResponse{ + Success: true, + Message: "Logged out successfully", + }) +} + +// GetCurrentUser returns the current authenticated user +// @Summary Get Current User +// @Description Returns authenticated user information from JWT token +// @Tags Authentication +// @Accept json +// @Produce json +// @Param Authorization header string true "Bearer token" example(Bearer eyJhbGc...) 
+// @Success 200 {object} AuthResponse "User data" +// @Failure 401 {object} AuthResponse "Missing or invalid token" +// @Router /api/v1/auth/me [get] +func (h *AuthHandler) GetCurrentUser(c *fiber.Ctx) error { + authHeader := c.Get("Authorization") + if authHeader == "" { + return c.Status(fiber.StatusUnauthorized).JSON(AuthResponse{ + Success: false, + Error: "missing_authorization", + }) + } + + token := strings.TrimPrefix(authHeader, "Bearer ") + claims, err := h.jwtService.ValidateAccessToken(token) + if err != nil { + return c.Status(fiber.StatusUnauthorized).JSON(AuthResponse{ + Success: false, + Error: "invalid_token", + }) + } + + // Get fresh user data from database + user, err := h.db.QueryUserByID(c.Context(), claims.UserID) + if err != nil || user == nil { + return c.Status(fiber.StatusUnauthorized).JSON(AuthResponse{ + Success: false, + Error: "user_not_found", + }) + } + + // Check if user is active + if !user.IsActive { + return c.Status(fiber.StatusUnauthorized).JSON(AuthResponse{ + Success: false, + Error: "account_disabled", + }) + } + + userData := &UserData{ + ID: user.ID, + Email: user.Email, + Username: user.Username.String, + FirstName: getStringPointer(user.FirstName), + LastName: getStringPointer(user.LastName), + Roles: user.Roles, + IsPterodactylAdmin: user.IsPterodactylAdmin, + IsVirtfusionAdmin: user.IsVirtfusionAdmin, + IsSystemAdmin: user.IsSystemAdmin, + PterodactylID: getInt64Pointer(user.PterodactylID), + EmailVerified: formatNullTime(user.EmailVerified), + } + + return c.Status(fiber.StatusOK).JSON(AuthResponse{ + Success: true, + User: userData, + }) +} diff --git a/internal/handlers/dashboard.go b/internal/handlers/dashboard.go new file mode 100644 index 0000000..2578187 --- /dev/null +++ b/internal/handlers/dashboard.go @@ -0,0 +1,583 @@ +package handlers + +import ( + "fmt" + "strings" + + "github.com/gofiber/fiber/v2" + "github.com/nodebyte/backend/internal/database" + "golang.org/x/crypto/bcrypt" +) + +// DashboardHandler 
handles dashboard API requests +type DashboardHandler struct { + db *database.DB +} + +// NewDashboardHandler creates a new dashboard handler +func NewDashboardHandler(db *database.DB) *DashboardHandler { + return &DashboardHandler{db: db} +} + +// GetDashboardStats retrieves user-specific dashboard statistics +// @Summary Get dashboard stats +// @Description Retrieves statistics for the user's dashboard including server counts and recent servers +// @Tags Dashboard +// @Accept json +// @Produce json +// @Security BearerAuth +// @Success 200 {object} SuccessResponse "Dashboard stats retrieved" +// @Failure 401 {object} ErrorResponse "Unauthorized" +// @Failure 500 {object} ErrorResponse "Internal server error" +// @Router /api/v1/dashboard/stats [get] +func (h *DashboardHandler) GetDashboardStats(c *fiber.Ctx) error { + ctx := c.Context() + + // Get user ID from auth context + userID, ok := c.Locals("userID").(string) + if !ok || userID == "" { + return c.Status(fiber.StatusUnauthorized).JSON(ErrorResponse{ + Success: false, + Error: "User not authenticated", + }) + } + + // Get server counts for this user + var totalServers, onlineServers, offlineServers, suspendedServers int + h.db.Pool.QueryRow(ctx, + `SELECT COUNT(*) FROM servers WHERE "ownerId" = $1`, userID).Scan(&totalServers) + h.db.Pool.QueryRow(ctx, + `SELECT COUNT(*) FROM servers WHERE "ownerId" = $1 AND status = 'RUNNING'`, userID).Scan(&onlineServers) + h.db.Pool.QueryRow(ctx, + `SELECT COUNT(*) FROM servers WHERE "ownerId" = $1 AND status = 'OFFLINE'`, userID).Scan(&offlineServers) + h.db.Pool.QueryRow(ctx, + `SELECT COUNT(*) FROM servers WHERE "ownerId" = $1 AND "isSuspended" = true`, userID).Scan(&suspendedServers) + + // Get recent servers + rows, err := h.db.Pool.Query(ctx, ` + SELECT + s.id, s.uuid, s.name, s.status, + n.name as node_name, + e.name as egg_name, + COALESCE((SELECT value FROM server_properties WHERE "serverId" = s.id AND key = 'memory'), '0') as memory_limit, + COALESCE((SELECT 
value FROM server_properties WHERE "serverId" = s.id AND key = 'cpu'), '100') as cpu_limit, + COALESCE((SELECT value FROM server_properties WHERE "serverId" = s.id AND key = 'disk'), '0') as disk_limit, + COALESCE((SELECT ip FROM allocations WHERE "serverId" = s.id AND "isAssigned" = true LIMIT 1), '0.0.0.0') as ip, + COALESCE((SELECT port FROM allocations WHERE "serverId" = s.id AND "isAssigned" = true LIMIT 1), 0) as port + FROM servers s + LEFT JOIN nodes n ON s."nodeId" = n.id + LEFT JOIN eggs e ON s."eggId" = e.id + WHERE s."ownerId" = $1 + ORDER BY s."updatedAt" DESC + LIMIT 6 + `, userID) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(ErrorResponse{ + Success: false, + Error: "Failed to fetch recent servers", + }) + } + defer rows.Close() + + type RecentServer struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + Game string `json:"game"` + Node string `json:"node"` + Resources struct { + Memory struct { + Used int `json:"used"` + Limit int `json:"limit"` + } `json:"memory"` + CPU struct { + Used int `json:"used"` + Limit int `json:"limit"` + } `json:"cpu"` + Disk struct { + Used int `json:"used"` + Limit int `json:"limit"` + } `json:"disk"` + } `json:"resources"` + } + + recentServers := []RecentServer{} + for rows.Next() { + var server RecentServer + var memoryLimit, cpuLimit, diskLimit, ip string + var port int + err := rows.Scan( + &server.ID, nil, &server.Name, &server.Status, + &server.Node, &server.Game, + &memoryLimit, &cpuLimit, &diskLimit, &ip, &port, + ) + if err != nil { + continue + } + + // Parse resource limits + var memLimit, cpuLim, diskLim int + fmt.Sscanf(memoryLimit, "%d", &memLimit) + fmt.Sscanf(cpuLimit, "%d", &cpuLim) + fmt.Sscanf(diskLimit, "%d", &diskLim) + + server.Resources.Memory.Limit = memLimit + server.Resources.CPU.Limit = cpuLim + server.Resources.Disk.Limit = diskLim + server.Resources.Memory.Used = 0 // Would come from real-time API + 
server.Resources.CPU.Used = 0 + server.Resources.Disk.Used = 0 + + recentServers = append(recentServers, server) + } + + // Get user account balance + var accountBalance float64 + h.db.Pool.QueryRow(ctx, + `SELECT COALESCE("accountBalance", 0) FROM users WHERE id = $1`, userID).Scan(&accountBalance) + + // Get open tickets count + var openTickets int + h.db.Pool.QueryRow(ctx, ` + SELECT COUNT(*) FROM support_tickets + WHERE "userId" = $1 AND status IN ('open', 'pending', 'in_progress') + `, userID).Scan(&openTickets) + + return c.JSON(SuccessResponse{ + Success: true, + Data: fiber.Map{ + "servers": fiber.Map{ + "total": totalServers, + "online": onlineServers, + "offline": offlineServers, + "suspended": suspendedServers, + }, + "recentServers": recentServers, + "accountBalance": accountBalance, + "openTickets": openTickets, + }, + }) +} + +// GetUserServers retrieves paginated server list for the authenticated user +// @Summary Get user servers +// @Description Retrieves paginated list of servers owned by the authenticated user with search and filtering +// @Tags Dashboard +// @Accept json +// @Produce json +// @Security BearerAuth +// @Param page query int false "Page number" default(1) +// @Param per_page query int false "Items per page" default(12) +// @Param search query string false "Search query" +// @Param status query string false "Status filter" +// @Success 200 {object} SuccessResponse "Servers retrieved" +// @Failure 401 {object} ErrorResponse "Unauthorized" +// @Failure 500 {object} ErrorResponse "Internal server error" +// @Router /api/v1/dashboard/servers [get] +func (h *DashboardHandler) GetUserServers(c *fiber.Ctx) error { + ctx := c.Context() + + // Get user ID from auth context + userID, ok := c.Locals("userID").(string) + if !ok || userID == "" { + return c.Status(fiber.StatusUnauthorized).JSON(ErrorResponse{ + Success: false, + Error: "User not authenticated", + }) + } + + // Parse query parameters + page := c.QueryInt("page", 1) + if page < 1 
{ + page = 1 + } + perPage := c.QueryInt("per_page", 12) + if perPage < 1 || perPage > 50 { + perPage = 12 + } + search := c.Query("search", "") + statusFilter := c.Query("status", "") + + // Build WHERE clause + whereClause := `"ownerId" = $1` + args := []interface{}{userID} + argIndex := 2 + + if search != "" { + whereClause += ` AND (name ILIKE $` + fmt.Sprintf("%d", argIndex) + ` OR description ILIKE $` + fmt.Sprintf("%d", argIndex) + `)` + args = append(args, "%"+search+"%") + argIndex++ + } + + if statusFilter != "" && statusFilter != "all" { + statusMap := map[string]string{ + "running": "RUNNING", + "online": "RUNNING", + "offline": "OFFLINE", + "starting": "STARTING", + "stopping": "STOPPING", + "suspended": "SUSPENDED", + "installing": "INSTALLING", + } + if mappedStatus, ok := statusMap[statusFilter]; ok { + if statusFilter == "suspended" { + whereClause += ` AND "isSuspended" = true` + } else { + whereClause += ` AND status = $` + fmt.Sprintf("%d", argIndex) + args = append(args, mappedStatus) + argIndex++ + } + } + } + + // Get total count + var total int + countQuery := `SELECT COUNT(*) FROM servers WHERE ` + whereClause + h.db.Pool.QueryRow(ctx, countQuery, args...).Scan(&total) + + // Calculate pagination + offset := (page - 1) * perPage + totalPages := (total + perPage - 1) / perPage + + // Get servers + query := ` + SELECT + s.id, s.uuid, s.name, s.description, s.status, + n.name as node_name, + e.name as egg_name, + COALESCE((SELECT value FROM server_properties WHERE "serverId" = s.id AND key = 'memory'), '0') as memory_limit, + COALESCE((SELECT value FROM server_properties WHERE "serverId" = s.id AND key = 'cpu'), '100') as cpu_limit, + COALESCE((SELECT value FROM server_properties WHERE "serverId" = s.id AND key = 'disk'), '0') as disk_limit, + COALESCE((SELECT ip FROM allocations WHERE "serverId" = s.id AND "isAssigned" = true LIMIT 1), '0.0.0.0') as ip, + COALESCE((SELECT port FROM allocations WHERE "serverId" = s.id AND "isAssigned" = true 
LIMIT 1), 0) as port, + s."createdAt"::text + FROM servers s + LEFT JOIN nodes n ON s."nodeId" = n.id + LEFT JOIN eggs e ON s."eggId" = e.id + WHERE ` + whereClause + ` + ORDER BY s."updatedAt" DESC + LIMIT $` + fmt.Sprintf("%d", argIndex) + ` OFFSET $` + fmt.Sprintf("%d", argIndex+1) + + args = append(args, perPage, offset) + rows, err := h.db.Pool.Query(ctx, query, args...) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(ErrorResponse{ + Success: false, + Error: "Failed to fetch servers", + }) + } + defer rows.Close() + + type Server struct { + ID string `json:"id"` + UUID string `json:"uuid"` + Name string `json:"name"` + Description string `json:"description"` + Status string `json:"status"` + Game string `json:"game"` + Node string `json:"node"` + IP string `json:"ip"` + Port int `json:"port"` + Resources struct { + Memory struct { + Used int `json:"used"` + Limit int `json:"limit"` + } `json:"memory"` + CPU struct { + Used int `json:"used"` + Limit int `json:"limit"` + } `json:"cpu"` + Disk struct { + Used int `json:"used"` + Limit int `json:"limit"` + } `json:"disk"` + } `json:"resources"` + CreatedAt string `json:"createdAt"` + } + + servers := []Server{} + for rows.Next() { + var server Server + var description *string + var memoryLimit, cpuLimit, diskLimit string + err := rows.Scan( + &server.ID, &server.UUID, &server.Name, &description, &server.Status, + &server.Node, &server.Game, + &memoryLimit, &cpuLimit, &diskLimit, + &server.IP, &server.Port, &server.CreatedAt, + ) + if err != nil { + continue + } + + if description != nil { + server.Description = *description + } + + // Parse resource limits + fmt.Sscanf(memoryLimit, "%d", &server.Resources.Memory.Limit) + fmt.Sscanf(cpuLimit, "%d", &server.Resources.CPU.Limit) + fmt.Sscanf(diskLimit, "%d", &server.Resources.Disk.Limit) + server.Resources.Memory.Used = 0 // Would come from real-time API + server.Resources.CPU.Used = 0 + server.Resources.Disk.Used = 0 + + servers = 
append(servers, server) + } + + return c.JSON(fiber.Map{ + "success": true, + "data": servers, + "meta": fiber.Map{ + "total": total, + "page": page, + "perPage": perPage, + "totalPages": totalPages, + }, + }) +} + +// GetUserAccount retrieves the authenticated user's account information +// @Summary Get user account +// @Description Retrieves account information for the authenticated user +// @Tags Dashboard +// @Accept json +// @Produce json +// @Security BearerAuth +// @Success 200 {object} SuccessResponse "Account info retrieved" +// @Failure 401 {object} ErrorResponse "Unauthorized" +// @Failure 500 {object} ErrorResponse "Internal server error" +// @Router /api/v1/dashboard/account [get] +func (h *DashboardHandler) GetUserAccount(c *fiber.Ctx) error { + ctx := c.Context() + + // Get user ID from auth context + userID, ok := c.Locals("userID").(string) + if !ok || userID == "" { + return c.Status(fiber.StatusUnauthorized).JSON(ErrorResponse{ + Success: false, + Error: "User not authenticated", + }) + } + + // Fetch user account data + var user struct { + ID string `json:"id"` + Username string `json:"username"` + Email string `json:"email"` + FirstName *string `json:"firstName"` + LastName *string `json:"lastName"` + AvatarURL *string `json:"avatarUrl"` + AccountBalance float64 `json:"accountBalance"` + CreatedAt string `json:"createdAt"` + EmailVerified bool `json:"emailVerified"` + } + + err := h.db.Pool.QueryRow(ctx, ` + SELECT id, username, email, "firstName", "lastName", "avatarUrl", + COALESCE("accountBalance", 0), "createdAt"::text, ("emailVerified" IS NOT NULL) + FROM users + WHERE id = $1 + `, userID).Scan( + &user.ID, &user.Username, &user.Email, &user.FirstName, &user.LastName, + &user.AvatarURL, &user.AccountBalance, &user.CreatedAt, &user.EmailVerified, + ) + + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(ErrorResponse{ + Success: false, + Error: "Failed to fetch account", + }) + } + + return c.JSON(SuccessResponse{ + 
Success: true, + Data: user, + }) +} + +// UpdateUserAccountRequest represents account update request +type UpdateUserAccountRequest struct { + Username *string `json:"username"` + Email *string `json:"email"` + FirstName *string `json:"firstName"` + LastName *string `json:"lastName"` +} + +// UpdateUserAccount updates the authenticated user's account information +// @Summary Update user account +// @Description Updates account information for the authenticated user +// @Tags Dashboard +// @Accept json +// @Produce json +// @Security BearerAuth +// @Param payload body UpdateUserAccountRequest true "Account update data" +// @Success 200 {object} SuccessResponse "Account updated" +// @Failure 400 {object} ErrorResponse "Invalid request" +// @Failure 401 {object} ErrorResponse "Unauthorized" +// @Failure 500 {object} ErrorResponse "Internal server error" +// @Router /api/v1/dashboard/account [put] +func (h *DashboardHandler) UpdateUserAccount(c *fiber.Ctx) error { + ctx := c.Context() + + // Get user ID from auth context + userID, ok := c.Locals("userID").(string) + if !ok || userID == "" { + return c.Status(fiber.StatusUnauthorized).JSON(ErrorResponse{ + Success: false, + Error: "User not authenticated", + }) + } + + var req UpdateUserAccountRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(ErrorResponse{ + Success: false, + Error: "Invalid request body", + }) + } + + // Build update query dynamically + updates := []string{} + args := []interface{}{} + argIndex := 1 + + if req.Username != nil && *req.Username != "" { + updates = append(updates, fmt.Sprintf(`username = $%d`, argIndex)) + args = append(args, *req.Username) + argIndex++ + } + if req.Email != nil && *req.Email != "" { + updates = append(updates, fmt.Sprintf(`email = $%d`, argIndex)) + args = append(args, *req.Email) + argIndex++ + } + if req.FirstName != nil { + updates = append(updates, fmt.Sprintf(`"firstName" = $%d`, argIndex)) + args = append(args, 
*req.FirstName) + argIndex++ + } + if req.LastName != nil { + updates = append(updates, fmt.Sprintf(`"lastName" = $%d`, argIndex)) + args = append(args, *req.LastName) + argIndex++ + } + + if len(updates) == 0 { + return c.Status(fiber.StatusBadRequest).JSON(ErrorResponse{ + Success: false, + Error: "No fields to update", + }) + } + + // Add updated timestamp + updates = append(updates, `"updatedAt" = NOW()`) + + // Add user ID for WHERE clause + args = append(args, userID) + + query := `UPDATE users SET ` + strings.Join(updates, ", ") + ` WHERE id = $` + fmt.Sprintf("%d", argIndex) + _, err := h.db.Pool.Exec(ctx, query, args...) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(ErrorResponse{ + Success: false, + Error: "Failed to update account", + }) + } + + return c.JSON(SuccessResponse{ + Success: true, + Message: "Account updated successfully", + }) +} + +// ChangePasswordRequest represents password change request +type ChangePasswordRequest struct { + CurrentPassword string `json:"currentPassword"` + NewPassword string `json:"newPassword"` +} + +// ChangePassword changes the authenticated user's password +// @Summary Change user password +// @Description Changes password for the authenticated user +// @Tags Dashboard +// @Accept json +// @Produce json +// @Security BearerAuth +// @Param payload body ChangePasswordRequest true "Password change data" +// @Success 200 {object} SuccessResponse "Password changed" +// @Failure 400 {object} ErrorResponse "Invalid request" +// @Failure 401 {object} ErrorResponse "Unauthorized or wrong password" +// @Failure 500 {object} ErrorResponse "Internal server error" +// @Router /api/v1/dashboard/account/password [put] +func (h *DashboardHandler) ChangePassword(c *fiber.Ctx) error { + ctx := c.Context() + + // Get user ID from auth context + userID, ok := c.Locals("userID").(string) + if !ok || userID == "" { + return c.Status(fiber.StatusUnauthorized).JSON(ErrorResponse{ + Success: false, + Error: 
"User not authenticated", + }) + } + + var req ChangePasswordRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(ErrorResponse{ + Success: false, + Error: "Invalid request body", + }) + } + + if req.CurrentPassword == "" || req.NewPassword == "" { + return c.Status(fiber.StatusBadRequest).JSON(ErrorResponse{ + Success: false, + Error: "Current and new passwords are required", + }) + } + + // Get current user with password + user, err := h.db.QueryUserByID(ctx, userID) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(ErrorResponse{ + Success: false, + Error: "Failed to verify user", + }) + } + + // Verify current password + if !user.VerifyPassword(req.CurrentPassword) { + return c.Status(fiber.StatusUnauthorized).JSON(ErrorResponse{ + Success: false, + Error: "Current password is incorrect", + }) + } + + // Hash new password + newHash, err := bcrypt.GenerateFromPassword([]byte(req.NewPassword), bcrypt.DefaultCost) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(ErrorResponse{ + Success: false, + Error: "Failed to hash new password", + }) + } + + // Update password + _, err = h.db.Pool.Exec(ctx, ` + UPDATE users + SET password = $1, "updatedAt" = NOW() + WHERE id = $2 + `, newHash, userID) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(ErrorResponse{ + Success: false, + Error: "Failed to update password", + }) + } + + return c.JSON(SuccessResponse{ + Success: true, + Message: "Password changed successfully", + }) +} diff --git a/internal/handlers/routes.go b/internal/handlers/routes.go index 32b103b..299d23b 100644 --- a/internal/handlers/routes.go +++ b/internal/handlers/routes.go @@ -1,8 +1,11 @@ package handlers import ( + "os" + "github.com/gofiber/fiber/v2" + "github.com/nodebyte/backend/internal/auth" "github.com/nodebyte/backend/internal/config" "github.com/nodebyte/backend/internal/database" 
"github.com/nodebyte/backend/internal/middleware" @@ -11,6 +14,16 @@ import ( // SetupRoutes configures all API routes func SetupRoutes(app *fiber.App, db *database.DB, queueManager *queue.Manager, apiKeyMiddleware *APIKeyMiddleware, cfg *config.Config) { + // Initialize JWT service + jwtSecret := os.Getenv("JWT_SECRET") + if jwtSecret == "" { + jwtSecret = os.Getenv("NEXTAUTH_SECRET") + } + if jwtSecret == "" { + panic("JWT_SECRET or NEXTAUTH_SECRET must be set") + } + jwtService := auth.NewJWTService(jwtSecret) + // Health check route (public - no authentication required) app.Get("/health", healthCheck(db, queueManager)) @@ -20,7 +33,7 @@ func SetupRoutes(app *fiber.App, db *database.DB, queueManager *queue.Manager, a app.Get("/api/panel/counts", statsHandler.GetPanelCounts) // Auth routes (public - no authentication required) - authHandler := NewAuthHandler(db, queueManager) + authHandler := NewAuthHandler(db, queueManager, jwtService) app.Post("/api/v1/auth/login", authHandler.AuthenticateUser) app.Post("/api/v1/auth/register", authHandler.RegisterUser) app.Post("/api/v1/auth/validate", authHandler.ValidateCredentials) @@ -29,6 +42,9 @@ func SetupRoutes(app *fiber.App, db *database.DB, queueManager *queue.Manager, a app.Post("/api/v1/auth/reset-password", authHandler.ResetPassword) app.Post("/api/v1/auth/magic-link", authHandler.RequestMagicLink) app.Post("/api/v1/auth/magic-link/verify", authHandler.VerifyMagicLink) + app.Post("/api/v1/auth/refresh", authHandler.RefreshToken) + app.Post("/api/v1/auth/logout", authHandler.Logout) + app.Get("/api/v1/auth/me", authHandler.GetCurrentUser) app.Get("/api/v1/auth/check-email", authHandler.CheckEmailExists) app.Get("/api/v1/auth/users/:id", authHandler.GetUserByID) @@ -83,6 +99,15 @@ func SetupRoutes(app *fiber.App, db *database.DB, queueManager *queue.Manager, a adminGroup.Patch("/settings/webhooks", webhooksHandler.TestWebhook) adminGroup.Delete("/settings/webhooks", webhooksHandler.DeleteWebhook) + // Admin user 
management routes + adminUserHandler := NewAdminUserHandler(db) + adminGroup.Get("/users", adminUserHandler.GetUsers) + adminGroup.Post("/users/roles", adminUserHandler.UpdateUserRoles) + + // Admin server management routes + adminServerHandler := NewAdminServerHandler(db) + adminGroup.Get("/servers", adminServerHandler.GetServers) + // Admin sync routes adminSyncHandler := NewAdminSyncHandler(db, queueManager) adminGroup.Get("/sync", adminSyncHandler.GetSyncStatusAdmin) @@ -92,6 +117,18 @@ func SetupRoutes(app *fiber.App, db *database.DB, queueManager *queue.Manager, a adminGroup.Get("/sync/settings", adminSyncHandler.GetSyncSettingsAdmin) adminGroup.Post("/sync/settings", adminSyncHandler.UpdateSyncSettingsAdmin) + // Admin stats routes (already exist) + adminGroup.Get("/stats", statsHandler.GetAdminStats) + + // Bearer-authenticated user routes (dashboard) + userRoutes := app.Group("/api/v1", bearerAuth.Handler()) + dashboardHandler := NewDashboardHandler(db) + userRoutes.Get("/dashboard/stats", dashboardHandler.GetDashboardStats) + userRoutes.Get("/dashboard/servers", dashboardHandler.GetUserServers) + userRoutes.Get("/dashboard/account", dashboardHandler.GetUserAccount) + userRoutes.Put("/dashboard/account", dashboardHandler.UpdateUserAccount) + userRoutes.Put("/dashboard/account/password", dashboardHandler.ChangePassword) + // Protected routes (require API key or bearer token) - AFTER admin routes protected := app.Group("/api", apiKeyMiddleware.Handler()) diff --git a/internal/workers/sync_handler.go b/internal/workers/sync_handler.go index bed9525..90dbd56 100644 --- a/internal/workers/sync_handler.go +++ b/internal/workers/sync_handler.go @@ -346,7 +346,7 @@ func (h *SyncHandler) HandleCleanupLogs(ctx context.Context, task *asynq.Task) e cutoff := time.Now().AddDate(0, 0, -days) - query := `DELETE FROM sync_logs WHERE created_at < $1` + query := `DELETE FROM sync_logs WHERE "startedAt" < $1` result, err := h.db.Pool.Exec(ctx, query, cutoff) if err != nil { 
return fmt.Errorf("failed to cleanup logs: %w", err) diff --git a/schemas/README.md b/schemas/README.md index 95c6199..4761660 100644 --- a/schemas/README.md +++ b/schemas/README.md @@ -10,49 +10,93 @@ This directory contains modular SQL schema files that define the complete NodeBy |------|--------|---------| | `schema_01_users_auth.sql` | users, sessions, password_reset_tokens, verification_tokens | User authentication and account management | | `schema_02_pterodactyl_sync.sql` | locations, nodes, allocations, nests, eggs, egg_variables, egg_properties | Game panel infrastructure sync data | -| `schema_03_servers.sql` | servers, server_variables, server_properties, server_databases, server_backups | Game server instances and configuration | -| `schema_04_billing.sql` | products, invoices, invoice_items, payments | Billing and commerce system | +| `schema_03_servers.sql` | servers, server_variables, server_properties, server_databases, server_backups | Server instances (game, VPS, email) and configuration | +| `schema_04_billing.sql` | products, invoices, invoice_items, payments | Billing and commerce system with flexible product types | | `schema_05_support_tickets.sql` | support_tickets, support_ticket_replies | Customer support ticketing | | `schema_06_discord_webhooks.sql` | discord_webhooks | Discord webhook management for notifications | | `schema_07_sync_logs.sql` | sync_logs | Synchronization history from panels | | `schema_08_config.sql` | config | System configuration key-value store | -| `schema_hytale.sql` | hytale_oauth_tokens, hytale_game_sessions | Hytale OAuth tokens and game sessions | +| `schema_09_hytale.sql` | hytale_oauth_tokens, hytale_game_sessions | Hytale OAuth tokens and game sessions | +| `schema_10_hytale_audit.sql` | hytale_audit_logs | Hytale token and session audit logging | +| `schema_11_hytale_server_logs.sql` | hytale_server_logs, hytale_log_sync_state | Persistent Hytale game server logs | +| `schema_12_server_subusers.sql` | 
server_subusers | User-server relationships with flexible permissions | +| `schema_13_hytale_server_link.sql` | hytale_game_sessions (extends) | Link game sessions to specific servers | +| `schema_14_partners.sql` | partners, partner_services, partner_revenue_sharing | Partner management and integration | +| `schema_15_careers.sql` | job_positions, job_applications, job_application_activity | Careers page and job application tracking | ## Quick Start -### Linux / macOS +### All Platforms (Using Make) ```bash -cd backend/schemas -chmod +x init-database.sh -./init-database.sh "postgresql://user:password@localhost:5432/nodebyte" +cd backend + +# Build the database tool +make build-tools + +# Initialize fresh database +make db-init + +# Or run interactive migration +make db-migrate ``` -### Windows +### Using Binary Directly + +```bash +cd backend + +# Build +go build -o bin/db ./cmd/db + +# Initialize +./bin/db init -database "postgresql://user:password@localhost:5432/nodebyte" + +# Migrate +./bin/db migrate -database "postgresql://user:password@localhost:5432/nodebyte" -```cmd -cd backend\schemas -init-database.bat "postgresql://user:password@localhost:5432/nodebyte" +# List schemas +./bin/db list ``` -### Manual Setup +### Development: Migrating New Schemas -If you prefer to execute schemas manually: +After pulling latest code with new schema files, use the migration tool: ```bash -psql postgresql://user:password@localhost:5432/nodebyte -f schema_01_users_auth.sql -psql postgresql://user:password@localhost:5432/nodebyte -f schema_02_pterodactyl_sync.sql -psql postgresql://user:password@localhost:5432/nodebyte -f schema_03_servers.sql -psql postgresql://user:password@localhost:5432/nodebyte -f schema_04_billing.sql -psql postgresql://user:password@localhost:5432/nodebyte -f schema_05_support_tickets.sql -psql postgresql://user:password@localhost:5432/nodebyte -f schema_06_discord_webhooks.sql -psql postgresql://user:password@localhost:5432/nodebyte -f 
schema_07_sync_logs.sql
-psql postgresql://user:password@localhost:5432/nodebyte -f schema_08_config.sql
-psql postgresql://user:password@localhost:5432/nodebyte -f schema_hytale.sql
+cd backend
+
+# Interactive: Choose which schemas to migrate
+make db-migrate
+
+# Or migrate specific schema
+make db-migrate-schema SCHEMA=schema_14_partners.sql
+
+# Or reset everything (careful!)
+make db-reset
+
+
 ```
 
-## Schema Details
+**Using the Go binary directly:**
+```bash
+./bin/db migrate -database "postgresql://user:password@localhost:5432/nodebyte"
+./bin/db migrate -database "postgresql://user:password@localhost:5432/nodebyte" -schema schema_14_partners.sql
+```
+### Manual Setup
+
+If you prefer to execute schemas manually, apply each `schema_*.sql` file with `psql` in numeric order.
+## For More Information
+
+See [DATABASE_TOOLS.md](../DATABASE_TOOLS.md) for comprehensive documentation on:
+- Building the database tool
+- Using all commands (init, migrate, reset, list)
+- Environment variables and configuration
+- Troubleshooting
+- Development workflow
+
+## Schema Details
 
 ### Users & Authentication
 
 **Tables:**
@@ -173,6 +217,36 @@ psql postgresql://user:password@localhost:5432/nodebyte -f schema_hytale.sql
 - Session token and identity token storage
 - Automatic expiry management
 
+### Partners
+
+**Tables:**
+- `partners` - Partner companies and integrations
+- `partner_services` - Services provided by partners
+- `partner_revenue_sharing` - Commission and payout configuration
+
+**Key Features:**
+- Support for multiple partner types (hosting provider, integration, reseller, affiliate)
+- Partnership status tracking (active, inactive, pending, suspended)
+- Service configuration storage
+- Commission structure management (percentage, fixed, tiered)
+- Payout frequency and method tracking
+- Featured partner highlighting
+
+### Careers
+
+**Tables:**
+- `job_positions` - Open job positions
+- 
`job_applications` - Applications from candidates +- `job_application_activity` - Activity log and status changes + +**Key Features:** +- Job position with full details (salary, location, remote options, skills required) +- Multi-step application tracking (new, reviewing, shortlisted, rejected, offered, hired) +- Candidate rating and internal notes +- Application activity logging for audit trail +- Support for custom screening questions via JSON +- Department and employment type filtering + ## Database Requirements - **PostgreSQL 12+** (uses UUID, JSONB, and other modern features) diff --git a/schemas/init-database.bat b/schemas/init-database.bat deleted file mode 100644 index 56be351..0000000 --- a/schemas/init-database.bat +++ /dev/null @@ -1,86 +0,0 @@ -@echo off -REM ============================================================================ -REM NodeByte Database Initialization Script (Windows) -REM ============================================================================ -REM This script initializes the complete NodeByte database schema -REM Usage: init-database.bat -REM -REM Example: -REM init-database.bat "postgresql://user:password@localhost:5432/nodebyte" -REM -REM ============================================================================ - -setlocal enabledelayedexpansion - -if "%~1"=="" ( - echo Usage: %0 ^ - echo. - echo Example: - echo %0 "postgresql://user:password@localhost:5432/nodebyte" - echo. - exit /b 1 -) - -set "DATABASE_URL=%~1" -set "SCRIPT_DIR=%~dp0" - -echo ============================================================================ -echo NodeByte Database Initialization -echo ============================================================================ -echo. -echo Database URL: %DATABASE_URL% (password hidden^) -echo Schema directory: %SCRIPT_DIR% -echo. 
- -REM Array of schema files in execution order -set "SCHEMAS[0]=schema_01_users_auth.sql" -set "SCHEMAS[1]=schema_02_pterodactyl_sync.sql" -set "SCHEMAS[2]=schema_03_servers.sql" -set "SCHEMAS[3]=schema_04_billing.sql" -set "SCHEMAS[4]=schema_05_support_tickets.sql" -set "SCHEMAS[5]=schema_06_discord_webhooks.sql" -set "SCHEMAS[6]=schema_07_sync_logs.sql" -set "SCHEMAS[7]=schema_08_config.sql" -set "SCHEMAS[8]=schema_09_hytale.sql" - -REM Execute each schema file -for /L %%i in (0,1,8) do ( - set "schema=!SCHEMAS[%%i]!" - set "schema_path=%SCRIPT_DIR%!schema!" - - if not exist "!schema_path!" ( - echo ❌ Schema file not found: !schema_path! - exit /b 1 - ) - - echo 📦 Executing: !schema! - psql "%DATABASE_URL%" -f "!schema_path!" > nul 2>&1 - if !errorlevel! equ 0 ( - echo ✅ !schema! - ) else ( - echo ❌ Failed to execute !schema! - exit /b 1 - ) -) - -echo. -echo ============================================================================ -echo ✅ Database initialization complete! -echo ============================================================================ -echo. -echo Summary: -echo - Users ^& Authentication (users, sessions, password_reset_tokens, verification_tokens^) -echo - Pterodactyl Sync (locations, nodes, allocations, nests, eggs, egg_variables, egg_properties^) -echo - Servers (servers, server_variables, server_properties, server_databases, server_backups^) -echo - Billing (products, invoices, invoice_items, payments^) -echo - Support (support_tickets, support_ticket_replies^) -echo - Discord Webhooks (discord_webhooks^) -echo - Sync Logs (sync_logs^) -echo - Config (config^) -echo - Hytale OAuth (hytale_oauth_tokens, hytale_game_sessions^) -echo. -echo You can now start your backend with: -echo go run ./cmd/api/main.go -echo. 
- -endlocal diff --git a/schemas/init-database.sh b/schemas/init-database.sh deleted file mode 100644 index 07b11c9..0000000 --- a/schemas/init-database.sh +++ /dev/null @@ -1,86 +0,0 @@ -#!/bin/bash -# ============================================================================ -# NodeByte Database Initialization Script -# ============================================================================ -# This script initializes the complete NodeByte database schema -# Usage: ./init-database.sh -# -# Example: -# ./init-database.sh "postgresql://user:password@localhost:5432/nodebyte" -# -# ============================================================================ - -set -e - -# Check if DATABASE_URL is provided -if [ -z "$1" ]; then - echo "Usage: $0 " - echo "" - echo "Example:" - echo " $0 'postgresql://user:password@localhost:5432/nodebyte'" - echo "" - exit 1 -fi - -DATABASE_URL="$1" -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" - -echo "============================================================================" -echo "NodeByte Database Initialization" -echo "============================================================================" -echo "" -echo "Database URL: $DATABASE_URL (password hidden)" -echo "Schema directory: $SCRIPT_DIR" -echo "" - -# Array of schema files in execution order -SCHEMAS=( - "schema_01_users_auth.sql" - "schema_02_pterodactyl_sync.sql" - "schema_03_servers.sql" - "schema_04_billing.sql" - "schema_05_support_tickets.sql" - "schema_06_discord_webhooks.sql" - "schema_07_sync_logs.sql" - "schema_08_config.sql" - "schema_09_hytale.sql" -) - -# Execute each schema file -for schema in "${SCHEMAS[@]}"; do - schema_path="$SCRIPT_DIR/$schema" - - if [ ! -f "$schema_path" ]; then - echo "❌ Schema file not found: $schema_path" - exit 1 - fi - - echo "📦 Executing: $schema" - psql "$DATABASE_URL" -f "$schema_path" > /dev/null 2>&1 - if [ $? 
-eq 0 ]; then - echo "✅ $schema" - else - echo "❌ Failed to execute $schema" - exit 1 - fi -done - -echo "" -echo "============================================================================" -echo "✅ Database initialization complete!" -echo "============================================================================" -echo "" -echo "Summary:" -echo " - Users & Authentication (users, sessions, password_reset_tokens, verification_tokens)" -echo " - Pterodactyl Sync (locations, nodes, allocations, nests, eggs, egg_variables, egg_properties)" -echo " - Servers (servers, server_variables, server_properties, server_databases, server_backups)" -echo " - Billing (products, invoices, invoice_items, payments)" -echo " - Support (support_tickets, support_ticket_replies)" -echo " - Discord Webhooks (discord_webhooks)" -echo " - Sync Logs (sync_logs)" -echo " - Config (config)" -echo " - Hytale OAuth (hytale_oauth_tokens, hytale_game_sessions)" -echo "" -echo "You can now start your backend with:" -echo " go run ./cmd/api/main.go" -echo "" diff --git a/schemas/schema_01_users_auth.sql b/schemas/schema_01_users_auth.sql index b7a43bd..6544602 100644 --- a/schemas/schema_01_users_auth.sql +++ b/schemas/schema_01_users_auth.sql @@ -8,61 +8,61 @@ CREATE TABLE IF NOT EXISTS users ( email TEXT NOT NULL UNIQUE, password TEXT, username TEXT, - first_name TEXT, - last_name TEXT, + "firstName" TEXT, + "lastName" TEXT, roles TEXT[] DEFAULT ARRAY['MEMBER'], - is_pterodactyl_admin BOOLEAN DEFAULT false, - is_virtfusion_admin BOOLEAN DEFAULT false, - is_system_admin BOOLEAN DEFAULT false, + "isPterodactylAdmin" BOOLEAN DEFAULT false, + "isVirtfusionAdmin" BOOLEAN DEFAULT false, + "isSystemAdmin" BOOLEAN DEFAULT false, - pterodactyl_id INTEGER, - virtfusion_id INTEGER, + "pterodactylId" INTEGER, + "virtfusionId" INTEGER, - is_migrated BOOLEAN DEFAULT false, - email_verified TIMESTAMP, - is_active BOOLEAN DEFAULT true, + "isMigrated" BOOLEAN DEFAULT false, + "emailVerified" TIMESTAMP, + 
"isActive" BOOLEAN DEFAULT true, - avatar_url TEXT, - company_name TEXT, - phone_number TEXT, - billing_email TEXT, + "avatarUrl" TEXT, + "companyName" TEXT, + "phoneNumber" TEXT, + "billingEmail" TEXT, - account_balance DECIMAL(10, 2) DEFAULT 0, - account_status TEXT DEFAULT 'active', + "accountBalance" DECIMAL(10, 2) DEFAULT 0, + "accountStatus" TEXT DEFAULT 'active', - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - last_login_at TIMESTAMP, - last_synced_at TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "lastLoginAt" TIMESTAMP, + "lastSyncedAt" TIMESTAMP ); CREATE INDEX IF NOT EXISTS idx_users_email ON users(email); -CREATE INDEX IF NOT EXISTS idx_users_pterodactyl_id ON users(pterodactyl_id); -CREATE INDEX IF NOT EXISTS idx_users_virtfusion_id ON users(virtfusion_id); +CREATE INDEX IF NOT EXISTS idx_users_pterodactyl_id ON users("pterodactylId"); +CREATE INDEX IF NOT EXISTS idx_users_virtfusion_id ON users("virtfusionId"); -- Sessions for user authentication CREATE TABLE IF NOT EXISTS sessions ( id TEXT PRIMARY KEY, - session_token TEXT NOT NULL UNIQUE, - user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "sessionToken" TEXT NOT NULL UNIQUE, + "userId" TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, expires TIMESTAMP NOT NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions(user_id); +CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions("userId"); CREATE INDEX IF NOT EXISTS idx_sessions_expires ON sessions(expires); -- Password reset tokens CREATE TABLE IF NOT EXISTS password_reset_tokens ( id TEXT PRIMARY KEY, - user_id TEXT NOT NULL UNIQUE REFERENCES users(id) ON DELETE CASCADE, + "userId" TEXT NOT NULL UNIQUE REFERENCES users(id) ON DELETE CASCADE, 
token TEXT NOT NULL UNIQUE, - expires_at TIMESTAMP NOT NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "expiresAt" TIMESTAMP NOT NULL, + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_password_reset_tokens_user_id ON password_reset_tokens(user_id); +CREATE INDEX IF NOT EXISTS idx_password_reset_tokens_user_id ON password_reset_tokens("userId"); CREATE INDEX IF NOT EXISTS idx_password_reset_tokens_token ON password_reset_tokens(token); -- Verification tokens for email verification, password reset, etc. @@ -71,7 +71,7 @@ CREATE TABLE IF NOT EXISTS verification_tokens ( token TEXT NOT NULL UNIQUE, expires TIMESTAMP NOT NULL, type TEXT DEFAULT 'email', - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, CONSTRAINT verification_tokens_unique UNIQUE (identifier, token) ); diff --git a/schemas/schema_02_pterodactyl_sync.sql b/schemas/schema_02_pterodactyl_sync.sql index e7f0b65..2d5b654 100644 --- a/schemas/schema_02_pterodactyl_sync.sql +++ b/schemas/schema_02_pterodactyl_sync.sql @@ -5,13 +5,13 @@ -- Locations (data center regions) CREATE TABLE IF NOT EXISTS locations ( id INTEGER PRIMARY KEY, - short_code TEXT NOT NULL UNIQUE, + "shortCode" TEXT NOT NULL UNIQUE, description TEXT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_locations_short_code ON locations(short_code); +CREATE INDEX IF NOT EXISTS idx_locations_short_code ON locations("shortCode"); -- Nodes (physical/virtual servers hosting game servers) CREATE TABLE IF NOT EXISTS nodes ( @@ -21,31 +21,31 @@ CREATE TABLE IF NOT EXISTS nodes ( description TEXT, fqdn TEXT NOT NULL, scheme TEXT DEFAULT 'https', - behind_proxy BOOLEAN DEFAULT false, + "behindProxy" BOOLEAN 
DEFAULT false, - panel_type TEXT DEFAULT 'pterodactyl', + "panelType" TEXT DEFAULT 'pterodactyl', memory BIGINT NOT NULL, - memory_overallocate INTEGER DEFAULT 0, + "memoryOverallocate" INTEGER DEFAULT 0, disk BIGINT NOT NULL, - disk_overallocate INTEGER DEFAULT 0, + "diskOverallocate" INTEGER DEFAULT 0, - is_public BOOLEAN DEFAULT true, - is_maintenance_mode BOOLEAN DEFAULT false, + "isPublic" BOOLEAN DEFAULT true, + "isMaintenanceMode" BOOLEAN DEFAULT false, - daemon_listen_port INTEGER DEFAULT 8080, - daemon_sftp_port INTEGER DEFAULT 2022, - daemon_base TEXT DEFAULT '/var/lib/pterodactyl/volumes', + "daemonListenPort" INTEGER DEFAULT 8080, + "daemonSftpPort" INTEGER DEFAULT 2022, + "daemonBase" TEXT DEFAULT '/var/lib/pterodactyl/volumes', - location_id INTEGER NOT NULL REFERENCES locations(id) ON DELETE CASCADE, + "locationId" INTEGER NOT NULL REFERENCES locations(id) ON DELETE CASCADE, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); CREATE INDEX IF NOT EXISTS idx_nodes_uuid ON nodes(uuid); -CREATE INDEX IF NOT EXISTS idx_nodes_panel_type ON nodes(panel_type); -CREATE INDEX IF NOT EXISTS idx_nodes_location_id ON nodes(location_id); +CREATE INDEX IF NOT EXISTS idx_nodes_panel_type ON nodes("panelType"); +CREATE INDEX IF NOT EXISTS idx_nodes_location_id ON nodes("locationId"); -- Allocations (IP:Port combinations on nodes) CREATE TABLE IF NOT EXISTS allocations ( @@ -54,19 +54,19 @@ CREATE TABLE IF NOT EXISTS allocations ( port INTEGER NOT NULL, alias TEXT, notes TEXT, - is_assigned BOOLEAN DEFAULT false, + "isAssigned" BOOLEAN DEFAULT false, - node_id INTEGER NOT NULL REFERENCES nodes(id) ON DELETE CASCADE, - server_id TEXT REFERENCES servers(id) ON DELETE SET NULL, + "nodeId" INTEGER NOT NULL REFERENCES nodes(id) ON DELETE CASCADE, + "serverId" TEXT REFERENCES servers(id) ON 
DELETE SET NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, CONSTRAINT allocations_unique UNIQUE (ip, port) ); -CREATE INDEX IF NOT EXISTS idx_allocations_node_id ON allocations(node_id); -CREATE INDEX IF NOT EXISTS idx_allocations_server_id ON allocations(server_id); +CREATE INDEX IF NOT EXISTS idx_allocations_node_id ON allocations("nodeId"); +CREATE INDEX IF NOT EXISTS idx_allocations_server_id ON allocations("serverId"); CREATE INDEX IF NOT EXISTS idx_allocations_ip_port ON allocations(ip, port); -- Nests (server type categories like Minecraft, Rust) @@ -76,8 +76,8 @@ CREATE TABLE IF NOT EXISTS nests ( name TEXT NOT NULL, description TEXT, author TEXT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); CREATE INDEX IF NOT EXISTS idx_nests_uuid ON nests(uuid); @@ -90,12 +90,12 @@ CREATE TABLE IF NOT EXISTS eggs ( description TEXT, author TEXT, - panel_type TEXT DEFAULT 'pterodactyl', + "panelType" TEXT DEFAULT 'pterodactyl', - nest_id INTEGER NOT NULL REFERENCES nests(id) ON DELETE CASCADE, + "nestId" INTEGER NOT NULL REFERENCES nests(id) ON DELETE CASCADE, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); CREATE INDEX IF NOT EXISTS idx_eggs_uuid ON eggs(uuid); diff --git a/schemas/schema_03_servers.sql b/schemas/schema_03_servers.sql index 8a811df..f0ba4b8 100644 --- a/schemas/schema_03_servers.sql +++ b/schemas/schema_03_servers.sql @@ -2,110 +2,132 @@ -- SERVERS & RELATED TABLES -- 
============================================================================ --- Servers (game server instances) +-- Servers (game server instances and other hosting services) CREATE TABLE IF NOT EXISTS servers ( id TEXT PRIMARY KEY, - pterodactyl_id INTEGER UNIQUE, - virtfusion_id INTEGER UNIQUE, - uuid TEXT NOT NULL UNIQUE, - uuid_short TEXT, - external_id TEXT UNIQUE, - panel_type TEXT DEFAULT 'pterodactyl', + -- Server type classification (game_server, vps, email, web_hosting, etc.) + "serverType" TEXT NOT NULL DEFAULT 'game_server', - name TEXT NOT NULL, - description TEXT, + -- Pterodactyl panel identifiers (nullable for non-game-server types) + "pterodactylId" INTEGER UNIQUE, + "virtfusionId" INTEGER UNIQUE, + uuid TEXT UNIQUE, -- nullable for non-Pterodactyl servers + "uuidShort" TEXT, + "externalId" TEXT UNIQUE, - status TEXT DEFAULT 'installing', - is_suspended BOOLEAN DEFAULT false, + -- Panel integration details (specific to serverType) + "panelType" TEXT DEFAULT 'pterodactyl', -- pterodactyl, proxmox, cPanel, etc. 
- product_id TEXT REFERENCES products(id) ON DELETE SET NULL, + -- Pterodactyl specific (for game_server type) + "eggId" INTEGER REFERENCES eggs(id) ON DELETE SET NULL, + "nestId" INTEGER REFERENCES nests(id) ON DELETE SET NULL, - owner_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, - node_id INTEGER NOT NULL REFERENCES nodes(id) ON DELETE RESTRICT, - egg_id INTEGER REFERENCES eggs(id) ON DELETE SET NULL, + -- Core server info + name TEXT NOT NULL, + description TEXT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - installed_at TIMESTAMP, - last_synced_at TIMESTAMP + -- Server state + status TEXT DEFAULT 'installing', -- installing, online, offline, suspended, error + "isSuspended" BOOLEAN DEFAULT false, + + -- Product and location + "productId" TEXT REFERENCES products(id) ON DELETE SET NULL, + "ownerId" TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "nodeId" INTEGER REFERENCES nodes(id) ON DELETE SET NULL, -- nullable for non-panel servers + + -- Server-type-specific configuration stored as JSON + -- Examples: + -- game_server: {"autoRestart": true, "backupSchedule": "daily"} + -- vps: {"osType": "Ubuntu", "cpuLimit": 2, "ramLimit": 4096} + -- email: {"domainName": "example.com", "mailServerType": "postfix"} + config JSONB DEFAULT '{}', + + -- Timestamps + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "installedAt" TIMESTAMP, + "lastSyncedAt" TIMESTAMP ); CREATE INDEX IF NOT EXISTS idx_servers_uuid ON servers(uuid); -CREATE INDEX IF NOT EXISTS idx_servers_pterodactyl_id ON servers(pterodactyl_id); -CREATE INDEX IF NOT EXISTS idx_servers_virtfusion_id ON servers(virtfusion_id); -CREATE INDEX IF NOT EXISTS idx_servers_panel_type ON servers(panel_type); -CREATE INDEX IF NOT EXISTS idx_servers_owner_id ON servers(owner_id); -CREATE INDEX IF NOT EXISTS idx_servers_node_id ON servers(node_id); -CREATE INDEX 
IF NOT EXISTS idx_servers_product_id ON servers(product_id); +CREATE INDEX IF NOT EXISTS idx_servers_pterodactyl_id ON servers("pterodactylId"); +CREATE INDEX IF NOT EXISTS idx_servers_virtfusion_id ON servers("virtfusionId"); +CREATE INDEX IF NOT EXISTS idx_servers_server_type ON servers("serverType"); +CREATE INDEX IF NOT EXISTS idx_servers_panel_type ON servers("panelType"); +CREATE INDEX IF NOT EXISTS idx_servers_owner_id ON servers("ownerId"); +CREATE INDEX IF NOT EXISTS idx_servers_node_id ON servers("nodeId"); +CREATE INDEX IF NOT EXISTS idx_servers_product_id ON servers("productId"); +CREATE INDEX IF NOT EXISTS idx_servers_status ON servers(status); +CREATE INDEX IF NOT EXISTS idx_servers_owner_type ON servers("ownerId", "serverType"); -- Server Variables (runtime configuration for servers) CREATE TABLE IF NOT EXISTS server_variables ( id TEXT PRIMARY KEY, - server_id TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, - egg_variable_id INTEGER NOT NULL REFERENCES egg_variables(id) ON DELETE CASCADE, + "serverId" TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, + "eggVariableId" INTEGER NOT NULL REFERENCES egg_variables(id) ON DELETE CASCADE, - variable_value TEXT NOT NULL, + "variableValue" TEXT NOT NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_server_variables_server_id ON server_variables(server_id); -CREATE INDEX IF NOT EXISTS idx_server_variables_egg_variable_id ON server_variables(egg_variable_id); +CREATE INDEX IF NOT EXISTS idx_server_variables_server_id ON server_variables("serverId"); +CREATE INDEX IF NOT EXISTS idx_server_variables_egg_variable_id ON server_variables("eggVariableId"); -- Server Properties (flexible key-value store for server specs) CREATE TABLE IF NOT EXISTS server_properties ( id TEXT 
PRIMARY KEY, - server_id TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, + "serverId" TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, key TEXT NOT NULL, value TEXT NOT NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - CONSTRAINT server_properties_unique UNIQUE (server_id, key) + CONSTRAINT server_properties_unique UNIQUE ("serverId", key) ); -CREATE INDEX IF NOT EXISTS idx_server_properties_server_id ON server_properties(server_id); +CREATE INDEX IF NOT EXISTS idx_server_properties_server_id ON server_properties("serverId"); -- Server Databases (databases associated with servers) CREATE TABLE IF NOT EXISTS server_databases ( id TEXT PRIMARY KEY, - server_id TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, + "serverId" TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, host TEXT NOT NULL, port INTEGER DEFAULT 3306, - database_name TEXT NOT NULL, + "databaseName" TEXT NOT NULL, username TEXT NOT NULL, password TEXT NOT NULL, - max_connections INTEGER DEFAULT 100, + "maxConnections" INTEGER DEFAULT 100, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_server_databases_server_id ON server_databases(server_id); +CREATE INDEX IF NOT EXISTS idx_server_databases_server_id ON server_databases("serverId"); -- Server Backups (backup records for servers) CREATE TABLE IF NOT EXISTS server_backups ( id TEXT PRIMARY KEY, - server_id TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, + "serverId" TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, - file_name TEXT NOT NULL, - file_size BIGINT, + "fileName" TEXT NOT NULL, + "fileSize" 
BIGINT, - is_successful BOOLEAN DEFAULT true, - failure_reason TEXT, + "isSuccessful" BOOLEAN DEFAULT true, + "failureReason" TEXT, locked BOOLEAN DEFAULT false, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - completed_at TIMESTAMP, - deleted_at TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "completedAt" TIMESTAMP, + "deletedAt" TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_server_backups_server_id ON server_backups(server_id); -CREATE INDEX IF NOT EXISTS idx_server_backups_created_at ON server_backups(created_at); +CREATE INDEX IF NOT EXISTS idx_server_backups_server_id ON server_backups("serverId"); +CREATE INDEX IF NOT EXISTS idx_server_backups_created_at ON server_backups("createdAt"); diff --git a/schemas/schema_04_billing.sql b/schemas/schema_04_billing.sql index 661d7fd..d542851 100644 --- a/schemas/schema_04_billing.sql +++ b/schemas/schema_04_billing.sql @@ -9,99 +9,126 @@ CREATE TABLE IF NOT EXISTS products ( slug TEXT NOT NULL UNIQUE, description TEXT, - panel_type TEXT DEFAULT 'pterodactyl', + -- Product type classification for services page filtering + -- Values: game_server, vps, email, web_hosting, database, cdn, etc. 
+ "serverType" TEXT NOT NULL DEFAULT 'game_server', - egg_id INTEGER REFERENCES eggs(id) ON DELETE SET NULL, - nest_id INTEGER REFERENCES nests(id) ON DELETE SET NULL, + -- Panel integration (specific to game_server type) + "panelType" TEXT DEFAULT 'pterodactyl', - price DECIMAL(10, 2) NOT NULL, - billing_cycle TEXT DEFAULT 'monthly', - is_free BOOLEAN DEFAULT false, - - specs_memory INTEGER, - specs_disk INTEGER, - specs_cpu DECIMAL(5, 2), - - created_by_id TEXT REFERENCES users(id) ON DELETE SET NULL, + -- Pterodactyl specific (for game_server type) + "eggId" INTEGER REFERENCES eggs(id) ON DELETE SET NULL, + "nestId" INTEGER REFERENCES nests(id) ON DELETE SET NULL, - is_active BOOLEAN DEFAULT true, - is_featured BOOLEAN DEFAULT false, - - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - deleted_at TIMESTAMP + -- Pricing + price DECIMAL(10, 2) NOT NULL, + "billingCycle" TEXT DEFAULT 'monthly', + "isFree" BOOLEAN DEFAULT false, + + -- Flexible specs for different product types + -- game_server: memory (MB), disk (GB), cpu (cores) + -- vps: memory (GB), disk (GB), vcpu (cores), bandwidth (Gbps) + -- email: storage (GB), mailboxes, etc + "specsMemory" INTEGER, -- In MB for game servers, GB for VPS + "specsDisk" INTEGER, -- In GB + "specsCpu" DECIMAL(5, 2), -- In cores + "specsBandwidth" DECIMAL(5, 2), -- In Gbps (for VPS/hosting) + "specsMailboxes" INTEGER, -- For email hosting + "specsStorage" INTEGER, -- In GB (for email/storage) + + -- Features stored as JSONB for flexibility across product types + -- Examples: + -- game_server: {"autoRestart": true, "dailyBackups": true, "console": true} + -- vps: {"rootAccess": true, "snapshots": true, "firewalling": true, "ddosProtection": false} + -- email: {"spamFilter": true, "virusScanning": true, "webmail": true} + features JSONB DEFAULT '{}', + + -- Optional description of what's included + "includeDescription" TEXT, + + -- Metadata + "createdById" TEXT 
REFERENCES users(id) ON DELETE SET NULL, + + "isActive" BOOLEAN DEFAULT true, + "isFeatured" BOOLEAN DEFAULT false, + + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "deletedAt" TIMESTAMP ); CREATE INDEX IF NOT EXISTS idx_products_slug ON products(slug); -CREATE INDEX IF NOT EXISTS idx_products_egg_id ON products(egg_id); -CREATE INDEX IF NOT EXISTS idx_products_created_by_id ON products(created_by_id); -CREATE INDEX IF NOT EXISTS idx_products_is_active ON products(is_active); +CREATE INDEX IF NOT EXISTS idx_products_server_type ON products("serverType"); +CREATE INDEX IF NOT EXISTS idx_products_egg_id ON products("eggId"); +CREATE INDEX IF NOT EXISTS idx_products_created_by_id ON products("createdById"); +CREATE INDEX IF NOT EXISTS idx_products_is_active ON products("isActive"); +CREATE INDEX IF NOT EXISTS idx_products_active_featured ON products("isActive", "isFeatured") WHERE "isActive" = true; -- Invoices (billing invoices) CREATE TABLE IF NOT EXISTS invoices ( id TEXT PRIMARY KEY, - invoice_number TEXT NOT NULL UNIQUE, - user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "invoiceNumber" TEXT NOT NULL UNIQUE, + "userId" TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, amount DECIMAL(10, 2) NOT NULL, tax DECIMAL(10, 2) DEFAULT 0, total DECIMAL(10, 2) NOT NULL, status TEXT DEFAULT 'unpaid', - payment_method TEXT, - paid_at TIMESTAMP, - due_at TIMESTAMP, + "paymentMethod" TEXT, + "paidAt" TIMESTAMP, + "dueAt" TIMESTAMP, notes TEXT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - deleted_at TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "deletedAt" TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_invoices_user_id ON invoices(user_id); -CREATE INDEX IF NOT EXISTS idx_invoices_invoice_number ON invoices(invoice_number); 
+CREATE INDEX IF NOT EXISTS idx_invoices_user_id ON invoices("userId"); +CREATE INDEX IF NOT EXISTS idx_invoices_invoice_number ON invoices("invoiceNumber"); CREATE INDEX IF NOT EXISTS idx_invoices_status ON invoices(status); -CREATE INDEX IF NOT EXISTS idx_invoices_created_at ON invoices(created_at); +CREATE INDEX IF NOT EXISTS idx_invoices_created_at ON invoices("createdAt"); -- Invoice Items (line items in invoices) CREATE TABLE IF NOT EXISTS invoice_items ( id TEXT PRIMARY KEY, - invoice_id TEXT NOT NULL REFERENCES invoices(id) ON DELETE CASCADE, + "invoiceId" TEXT NOT NULL REFERENCES invoices(id) ON DELETE CASCADE, description TEXT NOT NULL, quantity INTEGER DEFAULT 1, - unit_price DECIMAL(10, 2) NOT NULL, + "unitPrice" DECIMAL(10, 2) NOT NULL, amount DECIMAL(10, 2) NOT NULL, - product_id TEXT REFERENCES products(id) ON DELETE SET NULL, - server_id TEXT REFERENCES servers(id) ON DELETE SET NULL, + "productId" TEXT REFERENCES products(id) ON DELETE SET NULL, + "serverId" TEXT REFERENCES servers(id) ON DELETE SET NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_invoice_items_invoice_id ON invoice_items(invoice_id); -CREATE INDEX IF NOT EXISTS idx_invoice_items_product_id ON invoice_items(product_id); -CREATE INDEX IF NOT EXISTS idx_invoice_items_server_id ON invoice_items(server_id); +CREATE INDEX IF NOT EXISTS idx_invoice_items_invoice_id ON invoice_items("invoiceId"); +CREATE INDEX IF NOT EXISTS idx_invoice_items_product_id ON invoice_items("productId"); +CREATE INDEX IF NOT EXISTS idx_invoice_items_server_id ON invoice_items("serverId"); -- Payments (payment records) CREATE TABLE IF NOT EXISTS payments ( id TEXT PRIMARY KEY, - invoice_id TEXT NOT NULL REFERENCES invoices(id) ON DELETE CASCADE, - user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "invoiceId" TEXT NOT NULL REFERENCES invoices(id) ON DELETE CASCADE, + "userId" TEXT NOT NULL 
REFERENCES users(id) ON DELETE CASCADE, amount DECIMAL(10, 2) NOT NULL, - payment_method TEXT NOT NULL, + "paymentMethod" TEXT NOT NULL, - external_transaction_id TEXT UNIQUE, + "externalTransactionId" TEXT UNIQUE, status TEXT DEFAULT 'completed', notes TEXT, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_payments_invoice_id ON payments(invoice_id); -CREATE INDEX IF NOT EXISTS idx_payments_user_id ON payments(user_id); -CREATE INDEX IF NOT EXISTS idx_payments_external_transaction_id ON payments(external_transaction_id); +CREATE INDEX IF NOT EXISTS idx_payments_invoice_id ON payments("invoiceId"); +CREATE INDEX IF NOT EXISTS idx_payments_user_id ON payments("userId"); +CREATE INDEX IF NOT EXISTS idx_payments_external_transaction_id ON payments("externalTransactionId"); diff --git a/schemas/schema_05_support_tickets.sql b/schemas/schema_05_support_tickets.sql index adf64c9..0256c4e 100644 --- a/schemas/schema_05_support_tickets.sql +++ b/schemas/schema_05_support_tickets.sql @@ -5,10 +5,10 @@ -- Support Tickets CREATE TABLE IF NOT EXISTS support_tickets ( id TEXT PRIMARY KEY, - ticket_number TEXT NOT NULL UNIQUE, - user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "ticketNumber" TEXT NOT NULL UNIQUE, + "userId" TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, - server_id TEXT REFERENCES servers(id) ON DELETE SET NULL, + "serverId" TEXT REFERENCES servers(id) ON DELETE SET NULL, title TEXT NOT NULL, description TEXT NOT NULL, @@ -17,34 +17,34 @@ CREATE TABLE IF NOT EXISTS support_tickets ( priority TEXT DEFAULT 'medium', category TEXT, - assigned_to_id TEXT REFERENCES users(id) ON DELETE SET NULL, + "assignedToId" TEXT REFERENCES users(id) ON DELETE SET NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at 
TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - closed_at TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "closedAt" TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_support_tickets_user_id ON support_tickets(user_id); -CREATE INDEX IF NOT EXISTS idx_support_tickets_server_id ON support_tickets(server_id); -CREATE INDEX IF NOT EXISTS idx_support_tickets_assigned_to_id ON support_tickets(assigned_to_id); +CREATE INDEX IF NOT EXISTS idx_support_tickets_user_id ON support_tickets("userId"); +CREATE INDEX IF NOT EXISTS idx_support_tickets_server_id ON support_tickets("serverId"); +CREATE INDEX IF NOT EXISTS idx_support_tickets_assigned_to_id ON support_tickets("assignedToId"); CREATE INDEX IF NOT EXISTS idx_support_tickets_status ON support_tickets(status); CREATE INDEX IF NOT EXISTS idx_support_tickets_priority ON support_tickets(priority); -CREATE INDEX IF NOT EXISTS idx_support_tickets_created_at ON support_tickets(created_at); +CREATE INDEX IF NOT EXISTS idx_support_tickets_created_at ON support_tickets("createdAt"); -- Support Ticket Replies CREATE TABLE IF NOT EXISTS support_ticket_replies ( id TEXT PRIMARY KEY, - ticket_id TEXT NOT NULL REFERENCES support_tickets(id) ON DELETE CASCADE, - user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "ticketId" TEXT NOT NULL REFERENCES support_tickets(id) ON DELETE CASCADE, + "userId" TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, message TEXT NOT NULL, - is_internal BOOLEAN DEFAULT false, + "isInternal" BOOLEAN DEFAULT false, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - deleted_at TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "deletedAt" TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_support_ticket_replies_ticket_id ON support_ticket_replies(ticket_id); -CREATE INDEX IF 
NOT EXISTS idx_support_ticket_replies_user_id ON support_ticket_replies(user_id); -CREATE INDEX IF NOT EXISTS idx_support_ticket_replies_created_at ON support_ticket_replies(created_at); +CREATE INDEX IF NOT EXISTS idx_support_ticket_replies_ticket_id ON support_ticket_replies("ticketId"); +CREATE INDEX IF NOT EXISTS idx_support_ticket_replies_user_id ON support_ticket_replies("userId"); +CREATE INDEX IF NOT EXISTS idx_support_ticket_replies_created_at ON support_ticket_replies("createdAt"); diff --git a/schemas/schema_06_discord_webhooks.sql b/schemas/schema_06_discord_webhooks.sql index 92750bc..d58e464 100644 --- a/schemas/schema_06_discord_webhooks.sql +++ b/schemas/schema_06_discord_webhooks.sql @@ -5,39 +5,39 @@ -- Discord Webhooks (webhook management for Discord notifications) CREATE TABLE IF NOT EXISTS discord_webhooks ( id TEXT PRIMARY KEY, - user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "userId" TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, name TEXT NOT NULL, - webhook_url TEXT NOT NULL, - webhook_id TEXT NOT NULL UNIQUE, + "webhookUrl" TEXT NOT NULL, + "webhookId" TEXT NOT NULL UNIQUE, type TEXT NOT NULL DEFAULT 'server_events', scope TEXT NOT NULL DEFAULT 'account', - server_id TEXT REFERENCES servers(id) ON DELETE CASCADE, + "serverId" TEXT REFERENCES servers(id) ON DELETE CASCADE, - is_active BOOLEAN DEFAULT true, + "isActive" BOOLEAN DEFAULT true, - notify_on_server_start BOOLEAN DEFAULT true, - notify_on_server_stop BOOLEAN DEFAULT true, - notify_on_server_crash BOOLEAN DEFAULT true, - notify_on_backup_complete BOOLEAN DEFAULT true, - notify_on_backup_failed BOOLEAN DEFAULT true, - notify_on_console_output BOOLEAN DEFAULT false, - notify_on_player_join BOOLEAN DEFAULT false, - notify_on_player_leave BOOLEAN DEFAULT false, + "notifyOnServerStart" BOOLEAN DEFAULT true, + "notifyOnServerStop" BOOLEAN DEFAULT true, + "notifyOnServerCrash" BOOLEAN DEFAULT true, + "notifyOnBackupComplete" BOOLEAN DEFAULT true, + 
"notifyOnBackupFailed" BOOLEAN DEFAULT true, + "notifyOnConsoleOutput" BOOLEAN DEFAULT false, + "notifyOnPlayerJoin" BOOLEAN DEFAULT false, + "notifyOnPlayerLeave" BOOLEAN DEFAULT false, - custom_message TEXT, + "customMessage" TEXT, - last_used_at TIMESTAMP, + "lastUsedAt" TIMESTAMP, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - deleted_at TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "deletedAt" TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_discord_webhooks_user_id ON discord_webhooks(user_id); -CREATE INDEX IF NOT EXISTS idx_discord_webhooks_server_id ON discord_webhooks(server_id); -CREATE INDEX IF NOT EXISTS idx_discord_webhooks_webhook_id ON discord_webhooks(webhook_id); +CREATE INDEX IF NOT EXISTS idx_discord_webhooks_user_id ON discord_webhooks("userId"); +CREATE INDEX IF NOT EXISTS idx_discord_webhooks_server_id ON discord_webhooks("serverId"); +CREATE INDEX IF NOT EXISTS idx_discord_webhooks_webhook_id ON discord_webhooks("webhookId"); CREATE INDEX IF NOT EXISTS idx_discord_webhooks_type ON discord_webhooks(type); -CREATE INDEX IF NOT EXISTS idx_discord_webhooks_is_active ON discord_webhooks(is_active); +CREATE INDEX IF NOT EXISTS idx_discord_webhooks_is_active ON discord_webhooks("isActive"); diff --git a/schemas/schema_07_sync_logs.sql b/schemas/schema_07_sync_logs.sql index 2238b94..36305d4 100644 --- a/schemas/schema_07_sync_logs.sql +++ b/schemas/schema_07_sync_logs.sql @@ -5,25 +5,25 @@ -- Sync Logs (track synchronization history from panels) CREATE TABLE IF NOT EXISTS sync_logs ( id TEXT PRIMARY KEY, - sync_type TEXT NOT NULL, + "syncType" TEXT NOT NULL, status TEXT DEFAULT 'pending', - records_synced INTEGER DEFAULT 0, - records_failed INTEGER DEFAULT 0, - records_total INTEGER DEFAULT 0, + "recordsSynced" INTEGER DEFAULT 0, + "recordsFailed" INTEGER DEFAULT 0, + "recordsTotal" INTEGER 
DEFAULT 0, - started_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - completed_at TIMESTAMP, - duration_seconds INTEGER, + "startedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "completedAt" TIMESTAMP, + "durationSeconds" INTEGER, - error_message TEXT, + "errorMessage" TEXT, metadata JSONB, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); -CREATE INDEX IF NOT EXISTS idx_sync_logs_sync_type ON sync_logs(sync_type); +CREATE INDEX IF NOT EXISTS idx_sync_logs_sync_type ON sync_logs("syncType"); CREATE INDEX IF NOT EXISTS idx_sync_logs_status ON sync_logs(status); -CREATE INDEX IF NOT EXISTS idx_sync_logs_created_at ON sync_logs(created_at); -CREATE INDEX IF NOT EXISTS idx_sync_logs_started_at ON sync_logs(started_at); +CREATE INDEX IF NOT EXISTS idx_sync_logs_created_at ON sync_logs("createdAt"); +CREATE INDEX IF NOT EXISTS idx_sync_logs_started_at ON sync_logs("startedAt"); diff --git a/schemas/schema_08_config.sql b/schemas/schema_08_config.sql index 8c874ac..ef898ab 100644 --- a/schemas/schema_08_config.sql +++ b/schemas/schema_08_config.sql @@ -8,10 +8,10 @@ CREATE TABLE IF NOT EXISTS config ( key TEXT NOT NULL UNIQUE, value TEXT NOT NULL, description TEXT, - is_public BOOLEAN DEFAULT false, + "isPublic" BOOLEAN DEFAULT false, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); CREATE INDEX IF NOT EXISTS idx_config_key ON config(key); diff --git a/schemas/schema_09_hytale.sql b/schemas/schema_09_hytale.sql index 7d0ccf5..2a5e794 100644 --- a/schemas/schema_09_hytale.sql +++ b/schemas/schema_09_hytale.sql @@ -2,34 +2,34 @@ -- Stores OAuth tokens obtained from Hytale OAuth provider CREATE 
TABLE IF NOT EXISTS hytale_oauth_tokens ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - account_id UUID NOT NULL UNIQUE, - access_token TEXT NOT NULL, - refresh_token TEXT NOT NULL, - access_token_expiry TIMESTAMP NOT NULL, - profile_uuid UUID, + "accountId" UUID NOT NULL UNIQUE, + "accessToken" TEXT NOT NULL, + "refreshToken" TEXT NOT NULL, + "accessTokenExpiry" TIMESTAMP NOT NULL, + "profileUuid" UUID, scope TEXT NOT NULL DEFAULT 'openid offline auth:server', - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - last_refreshed_at TIMESTAMP, - CONSTRAINT hytale_oauth_tokens_account_id_key UNIQUE (account_id) + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "lastRefreshedAt" TIMESTAMP, + CONSTRAINT hytale_oauth_tokens_account_id_key UNIQUE ("accountId") ); -CREATE INDEX IF NOT EXISTS idx_hytale_oauth_tokens_account_id ON hytale_oauth_tokens(account_id); -CREATE INDEX IF NOT EXISTS idx_hytale_oauth_tokens_access_token_expiry ON hytale_oauth_tokens(access_token_expiry); +CREATE INDEX IF NOT EXISTS idx_hytale_oauth_tokens_account_id ON hytale_oauth_tokens("accountId"); +CREATE INDEX IF NOT EXISTS idx_hytale_oauth_tokens_access_token_expiry ON hytale_oauth_tokens("accessTokenExpiry"); -- Hytale Game Sessions -- Stores active game sessions for Hytale servers CREATE TABLE IF NOT EXISTS hytale_game_sessions ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - account_id UUID NOT NULL, - profile_uuid UUID NOT NULL, - session_token TEXT NOT NULL, - identity_token TEXT NOT NULL, - expires_at TIMESTAMP NOT NULL, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - CONSTRAINT hytale_game_sessions_account_profile_key UNIQUE (account_id, profile_uuid) + "accountId" UUID NOT NULL, + "profileUuid" UUID NOT NULL, + "sessionToken" TEXT NOT NULL, + "identityToken" TEXT NOT NULL, + 
"expiresAt" TIMESTAMP NOT NULL, + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT hytale_game_sessions_account_profile_key UNIQUE ("accountId", "profileUuid") ); -CREATE INDEX IF NOT EXISTS idx_hytale_game_sessions_account_id ON hytale_game_sessions(account_id); -CREATE INDEX IF NOT EXISTS idx_hytale_game_sessions_expires_at ON hytale_game_sessions(expires_at); +CREATE INDEX IF NOT EXISTS idx_hytale_game_sessions_account_id ON hytale_game_sessions("accountId"); +CREATE INDEX IF NOT EXISTS idx_hytale_game_sessions_expires_at ON hytale_game_sessions("expiresAt"); diff --git a/schemas/schema_10_hytale_audit.sql b/schemas/schema_10_hytale_audit.sql index a3a3259..033b984 100644 --- a/schemas/schema_10_hytale_audit.sql +++ b/schemas/schema_10_hytale_audit.sql @@ -6,34 +6,34 @@ CREATE TABLE IF NOT EXISTS hytale_audit_logs ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), -- Account and profile information - account_id UUID NOT NULL, - profile_id UUID, + "accountId" UUID NOT NULL, + "profileId" UUID, -- Event type and details - event_type VARCHAR(50) NOT NULL, + "eventType" VARCHAR(50) NOT NULL, details TEXT, -- JSON details if needed -- Request context - ip_address INET, - user_agent TEXT, + "ipAddress" INET, + "userAgent" TEXT, -- Timestamps - created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP, + "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP, -- Indexes for efficient querying - FOREIGN KEY (account_id) REFERENCES hytale_oauth_tokens(account_id) ON DELETE CASCADE + FOREIGN KEY ("accountId") REFERENCES hytale_oauth_tokens("accountId") ON DELETE CASCADE ); -- Create indexes for common queries -CREATE INDEX IF NOT EXISTS idx_hytale_audit_account_id ON hytale_audit_logs(account_id); -CREATE INDEX IF NOT EXISTS idx_hytale_audit_event_type ON hytale_audit_logs(event_type); -CREATE INDEX IF NOT EXISTS idx_hytale_audit_created_at ON 
hytale_audit_logs(created_at DESC); -CREATE INDEX IF NOT EXISTS idx_hytale_audit_account_created ON hytale_audit_logs(account_id, created_at DESC); +CREATE INDEX IF NOT EXISTS idx_hytale_audit_account_id ON hytale_audit_logs("accountId"); +CREATE INDEX IF NOT EXISTS idx_hytale_audit_event_type ON hytale_audit_logs("eventType"); +CREATE INDEX IF NOT EXISTS idx_hytale_audit_created_at ON hytale_audit_logs("createdAt" DESC); +CREATE INDEX IF NOT EXISTS idx_hytale_audit_account_created ON hytale_audit_logs("accountId", "createdAt" DESC); -- Add a constraint to validate event types ALTER TABLE hytale_audit_logs ADD CONSTRAINT check_valid_event_type CHECK ( - event_type IN ( + "eventType" IN ( 'TOKEN_CREATED', 'TOKEN_REFRESHED', 'TOKEN_DELETED', diff --git a/schemas/schema_11_hytale_server_logs.sql b/schemas/schema_11_hytale_server_logs.sql index 5c6913e..b34e315 100644 --- a/schemas/schema_11_hytale_server_logs.sql +++ b/schemas/schema_11_hytale_server_logs.sql @@ -3,42 +3,42 @@ CREATE TABLE hytale_server_logs ( id BIGSERIAL PRIMARY KEY, - server_uuid TEXT NOT NULL, - account_id VARCHAR(255) NOT NULL, - log_line TEXT NOT NULL, - log_timestamp TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, - created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + "serverUuid" TEXT NOT NULL, + "accountId" VARCHAR(255) NOT NULL, + "logLine" TEXT NOT NULL, + "logTimestamp" TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + "createdAt" TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, -- Indexes for common queries - CONSTRAINT fk_server_uuid FOREIGN KEY (server_uuid) + CONSTRAINT fk_server_uuid FOREIGN KEY ("serverUuid") REFERENCES servers(uuid) ON DELETE CASCADE ); -- Create indexes for efficient log retrieval -CREATE INDEX idx_hytale_server_logs_server_uuid ON hytale_server_logs(server_uuid DESC); -CREATE INDEX idx_hytale_server_logs_created_at ON hytale_server_logs(created_at DESC); -CREATE INDEX idx_hytale_server_logs_server_created ON hytale_server_logs(server_uuid 
DESC, created_at DESC); -CREATE INDEX idx_hytale_server_logs_account_id ON hytale_server_logs(account_id); +CREATE INDEX idx_hytale_server_logs_server_uuid ON hytale_server_logs("serverUuid" DESC); +CREATE INDEX idx_hytale_server_logs_created_at ON hytale_server_logs("createdAt" DESC); +CREATE INDEX idx_hytale_server_logs_server_created ON hytale_server_logs("serverUuid" DESC, "createdAt" DESC); +CREATE INDEX idx_hytale_server_logs_account_id ON hytale_server_logs("accountId"); -- Create a table to track log persistence state (last synced timestamp) CREATE TABLE hytale_log_sync_state ( id SERIAL PRIMARY KEY, - server_uuid TEXT NOT NULL UNIQUE, - last_sync_time TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, - last_line_id BIGINT DEFAULT 0, - sync_status VARCHAR(50) DEFAULT 'pending', -- pending, syncing, success, failed - error_message TEXT, - updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + "serverUuid" TEXT NOT NULL UNIQUE, + "lastSyncTime" TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + "lastLineId" BIGINT DEFAULT 0, + "syncStatus" VARCHAR(50) DEFAULT 'pending', -- pending, syncing, success, failed + "errorMessage" TEXT, + "updatedAt" TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, - CONSTRAINT fk_sync_server_uuid FOREIGN KEY (server_uuid) + CONSTRAINT fk_sync_server_uuid FOREIGN KEY ("serverUuid") REFERENCES servers(uuid) ON DELETE CASCADE ); -- Create index for sync state queries -CREATE INDEX idx_hytale_log_sync_state_server_uuid ON hytale_log_sync_state(server_uuid); -CREATE INDEX idx_hytale_log_sync_state_status ON hytale_log_sync_state(sync_status); +CREATE INDEX idx_hytale_log_sync_state_server_uuid ON hytale_log_sync_state("serverUuid"); +CREATE INDEX idx_hytale_log_sync_state_status ON hytale_log_sync_state("syncStatus"); -- Add comment for documentation COMMENT ON TABLE hytale_server_logs IS 'Stores persistent console output logs from Hytale game servers. 
Allows historical log retrieval and persistence across Panel page refreshes.'; -COMMENT ON COLUMN hytale_server_logs.log_timestamp IS 'Timestamp when the log line was generated by the game server'; +COMMENT ON COLUMN hytale_server_logs."logTimestamp" IS 'Timestamp when the log line was generated by the game server'; COMMENT ON TABLE hytale_log_sync_state IS 'Tracks synchronization state between Hytale logs and local persistent storage.'; diff --git a/schemas/schema_12_server_subusers.sql b/schemas/schema_12_server_subusers.sql index ee54fea..f6b5ec0 100644 --- a/schemas/schema_12_server_subusers.sql +++ b/schemas/schema_12_server_subusers.sql @@ -7,29 +7,45 @@ -- with permission tracking for access control and auditing CREATE TABLE IF NOT EXISTS server_subusers ( id TEXT PRIMARY KEY, - server_id TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, - user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "serverId" TEXT NOT NULL REFERENCES servers(id) ON DELETE CASCADE, + "userId" TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE, - -- Permissions array from Pterodactyl Client API - -- Examples: ["control.console", "control.start", "file.read", "user.create"] + -- Permissions can be: + -- 1. Pterodactyl: ["control.console", "control.start", "file.read", "user.create"] + -- 2. VPS/Linux: ["ssh", "sudo", "file_manager", "package_manager"] + -- 3. 
Email: ["manage_mailboxes", "manage_domains", "manage_forwarding"] permissions TEXT[] DEFAULT '{}', + -- Access level for non-permission-based systems + -- Values: owner, admin, user, viewer, billing_only + "accessLevel" TEXT DEFAULT 'user', + + -- Type-specific access configuration stored as JSON + -- Examples: + -- pterodactyl: {"canCreateSubusers": false, "canDeleteServer": false} + -- vps: {"sshKeyOnly": true, "ipWhitelistEnabled": false, "ipWhitelist": ["1.2.3.4"]} + -- email: {"canCreateMailboxes": true, "maxMailboxes": 10, "maxStorage": 100} + "accessConfig" JSONB DEFAULT '{}', + -- Metadata - is_owner BOOLEAN DEFAULT false, - added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - last_synced_at TIMESTAMP, + "isOwner" BOOLEAN DEFAULT false, + "addedAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + "lastSyncedAt" TIMESTAMP, -- Prevent duplicate user-server relationships - CONSTRAINT server_subusers_unique UNIQUE (server_id, user_id) + CONSTRAINT server_subusers_unique UNIQUE ("serverId", "userId") ); -- Indexes for efficient queries -CREATE INDEX IF NOT EXISTS idx_server_subusers_server ON server_subusers(server_id); -CREATE INDEX IF NOT EXISTS idx_server_subusers_user ON server_subusers(user_id); -CREATE INDEX IF NOT EXISTS idx_server_subusers_owner ON server_subusers(is_owner) WHERE is_owner = true; +CREATE INDEX IF NOT EXISTS idx_server_subusers_server ON server_subusers("serverId"); +CREATE INDEX IF NOT EXISTS idx_server_subusers_user ON server_subusers("userId"); +CREATE INDEX IF NOT EXISTS idx_server_subusers_owner ON server_subusers("isOwner") WHERE "isOwner" = true; +CREATE INDEX IF NOT EXISTS idx_server_subusers_access_level ON server_subusers("accessLevel"); -- Comments for documentation -COMMENT ON TABLE server_subusers IS 'User-server relationships including owners and subusers with permissions'; -COMMENT ON COLUMN server_subusers.permissions IS 'Array of Pterodactyl permission strings for this user on this server'; -COMMENT ON COLUMN server_subusers.is_owner 
IS 'True if this user is the primary owner of the server'; -COMMENT ON COLUMN server_subusers.last_synced_at IS 'Last time this relationship was synced from Pterodactyl panel'; +COMMENT ON TABLE server_subusers IS 'User-server relationships including owners and subusers with flexible permission/access control'; +COMMENT ON COLUMN server_subusers.permissions IS 'Array of permission strings (format varies by serverType: Pterodactyl, VPS, email, etc.)'; +COMMENT ON COLUMN server_subusers."accessLevel" IS 'Hierarchical access level: owner, admin, user, viewer, billing_only'; +COMMENT ON COLUMN server_subusers."accessConfig" IS 'Type-specific access configuration for additional control rules'; +COMMENT ON COLUMN server_subusers."isOwner" IS 'True if this user is the primary owner of the server'; +COMMENT ON COLUMN server_subusers."lastSyncedAt" IS 'Last time this relationship was synced from the control panel'; diff --git a/schemas/schema_13_hytale_server_link.sql b/schemas/schema_13_hytale_server_link.sql index 32862cf..b23abf0 100644 --- a/schemas/schema_13_hytale_server_link.sql +++ b/schemas/schema_13_hytale_server_link.sql @@ -5,10 +5,10 @@ -- Add server_id column to hytale_game_sessions if not exists ALTER TABLE hytale_game_sessions -ADD COLUMN IF NOT EXISTS server_id TEXT REFERENCES servers(id) ON DELETE CASCADE; +ADD COLUMN IF NOT EXISTS "serverId" TEXT REFERENCES servers(id) ON DELETE CASCADE; -- Create index for efficient server-based lookups -CREATE INDEX IF NOT EXISTS idx_hytale_game_sessions_server_id ON hytale_game_sessions(server_id); +CREATE INDEX IF NOT EXISTS idx_hytale_game_sessions_server_id ON hytale_game_sessions("serverId"); -- Comment explaining the relationship -COMMENT ON COLUMN hytale_game_sessions.server_id IS 'Links game session to specific Pterodactyl server for automatic token push'; +COMMENT ON COLUMN hytale_game_sessions."serverId" IS 'Links game session to specific Pterodactyl server for automatic token push'; diff --git 
a/schemas/schema_14_partners.sql b/schemas/schema_14_partners.sql new file mode 100644 index 0000000..ab1349b --- /dev/null +++ b/schemas/schema_14_partners.sql @@ -0,0 +1,84 @@ +-- ============================================================================ +-- PARTNERS SCHEMA - Integration & Partnership Management +-- ============================================================================ + +-- Partners (hosting providers, integrations, collaborators) +CREATE TABLE IF NOT EXISTS partners ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL UNIQUE, + slug TEXT NOT NULL UNIQUE, + description TEXT, + + -- Partner classification + -- Values: hosting_provider, integration, reseller, affiliate, technology_partner + "partnerType" TEXT NOT NULL, + + -- Contact and branding + website TEXT, + "logoUrl" TEXT, + "contactEmail" TEXT, + "contactPerson" TEXT, + + -- Partnership details + "partnershipStartDate" TIMESTAMP, + "partnershipEndDate" TIMESTAMP, + status TEXT DEFAULT 'active', -- active, inactive, pending, suspended + + -- Metadata + metadata JSONB DEFAULT '{}', -- API keys, endpoints, terms, etc. 
+ + -- Admin controls + "createdById" TEXT REFERENCES users(id) ON DELETE SET NULL, + + "isActive" BOOLEAN DEFAULT true, + "isFeatured" BOOLEAN DEFAULT false, + + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "deletedAt" TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_partners_slug ON partners(slug); +CREATE INDEX IF NOT EXISTS idx_partners_type ON partners("partnerType"); +CREATE INDEX IF NOT EXISTS idx_partners_status ON partners(status); +CREATE INDEX IF NOT EXISTS idx_partners_is_active ON partners("isActive"); +CREATE INDEX IF NOT EXISTS idx_partners_is_featured ON partners("isFeatured"); + +-- Partner Services (what services each partner provides/integrates) +CREATE TABLE IF NOT EXISTS partner_services ( + id TEXT PRIMARY KEY, + "partnerId" TEXT NOT NULL REFERENCES partners(id) ON DELETE CASCADE, + + name TEXT NOT NULL, + description TEXT, + + -- Service configuration/endpoints + config JSONB DEFAULT '{}', + + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_partner_services_partner_id ON partner_services("partnerId"); + +-- Partner Revenue Sharing (commission structure and tracking) +CREATE TABLE IF NOT EXISTS partner_revenue_sharing ( + id TEXT PRIMARY KEY, + "partnerId" TEXT NOT NULL REFERENCES partners(id) ON DELETE CASCADE, + + -- Commission structure + "commissionType" TEXT NOT NULL, -- percentage, fixed, tiered + "commissionRate" DECIMAL(5, 2), -- for percentage: 0-100 + "commissionAmount" DECIMAL(10, 2), -- for fixed: amount + + -- Payout details + "payoutFrequency" TEXT DEFAULT 'monthly', -- monthly, quarterly, yearly + "minimumPayout" DECIMAL(10, 2), + "payoutMethod" TEXT, -- bank_transfer, paypal, crypto, etc. 
+ "payoutAccount" TEXT, -- encrypted account details + + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_partner_revenue_sharing_partner_id ON partner_revenue_sharing("partnerId"); diff --git a/schemas/schema_15_careers.sql b/schemas/schema_15_careers.sql new file mode 100644 index 0000000..20742f9 --- /dev/null +++ b/schemas/schema_15_careers.sql @@ -0,0 +1,115 @@ +-- ============================================================================ +-- CAREERS SCHEMA - Job Positions & Applications +-- ============================================================================ + +-- Job Positions (open positions on careers page) +CREATE TABLE IF NOT EXISTS job_positions ( + id TEXT PRIMARY KEY, + title TEXT NOT NULL, + slug TEXT NOT NULL UNIQUE, + description TEXT NOT NULL, + + -- Job details + department TEXT NOT NULL, + "employmentType" TEXT NOT NULL, -- full_time, part_time, contract, internship + location TEXT NOT NULL, + "isRemote" BOOLEAN DEFAULT false, + "salaryMin" DECIMAL(10, 2), + "salaryMax" DECIMAL(10, 2), + "salaryCurrency" TEXT DEFAULT 'USD', + + -- Job specifications + "requiredSkills" TEXT[] DEFAULT '{}', + "niceToHaveSkills" TEXT[] DEFAULT '{}', + "yearsOfExperience" INTEGER, + + -- Content + "shortDescription" TEXT, + requirements TEXT, + benefits TEXT, + "aboutRole" TEXT, + + -- Status and visibility + status TEXT DEFAULT 'draft', -- draft, published, closed, archived + "isActive" BOOLEAN DEFAULT true, + + -- Metadata + "createdById" TEXT REFERENCES users(id) ON DELETE SET NULL, + + "publishedAt" TIMESTAMP, + "closedAt" TIMESTAMP, + + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "deletedAt" TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_job_positions_slug ON job_positions(slug); +CREATE INDEX IF NOT EXISTS idx_job_positions_department ON job_positions(department); 
+CREATE INDEX IF NOT EXISTS idx_job_positions_employment_type ON job_positions("employmentType"); +CREATE INDEX IF NOT EXISTS idx_job_positions_status ON job_positions(status); +CREATE INDEX IF NOT EXISTS idx_job_positions_is_active ON job_positions("isActive"); +CREATE INDEX IF NOT EXISTS idx_job_positions_created_at ON job_positions("createdAt"); + +-- Job Applications (applications from candidates) +CREATE TABLE IF NOT EXISTS job_applications ( + id TEXT PRIMARY KEY, + "jobPositionId" TEXT NOT NULL REFERENCES job_positions(id) ON DELETE CASCADE, + + -- Applicant info + "firstName" TEXT NOT NULL, + "lastName" TEXT NOT NULL, + email TEXT NOT NULL, + phone TEXT, + + -- Application details + "resumeUrl" TEXT, + "portfolioUrl" TEXT, + "linkedinUrl" TEXT, + "githubUrl" TEXT, + + -- Cover letter and additional info + "coverLetter" TEXT, + "additionalInfo" JSONB DEFAULT '{}', -- custom fields, answers to screening questions, etc. + + -- Status tracking + status TEXT DEFAULT 'new', -- new, reviewing, shortlisted, rejected, offered, hired, withdrawn + "ratingScore" DECIMAL(3, 1), -- 0-5 star rating + notes TEXT, -- internal notes for hiring team + + -- Metadata + "appliedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "reviewedAt" TIMESTAMP, + "reviewedById" TEXT REFERENCES users(id) ON DELETE SET NULL, + + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "deletedAt" TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_job_applications_job_position_id ON job_applications("jobPositionId"); +CREATE INDEX IF NOT EXISTS idx_job_applications_email ON job_applications(email); +CREATE INDEX IF NOT EXISTS idx_job_applications_status ON job_applications(status); +CREATE INDEX IF NOT EXISTS idx_job_applications_applied_at ON job_applications("appliedAt"); +CREATE INDEX IF NOT EXISTS idx_job_applications_job_status ON job_applications("jobPositionId", status); + +-- Job Application Activity (track application 
status changes and interactions) +CREATE TABLE IF NOT EXISTS job_application_activity ( + id TEXT PRIMARY KEY, + "applicationId" TEXT NOT NULL REFERENCES job_applications(id) ON DELETE CASCADE, + + -- Activity log + "activityType" TEXT NOT NULL, -- status_change, note_added, email_sent, interview_scheduled, etc. + description TEXT, + "oldStatus" TEXT, + "newStatus" TEXT, + + -- Who performed the action + "performedById" TEXT REFERENCES users(id) ON DELETE SET NULL, + + "createdAt" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_job_application_activity_application_id ON job_application_activity("applicationId"); +CREATE INDEX IF NOT EXISTS idx_job_application_activity_type ON job_application_activity("activityType"); +CREATE INDEX IF NOT EXISTS idx_job_application_activity_created_at ON job_application_activity("createdAt");