openEditModal(job)}
+ >
-
+
@@ -290,7 +326,7 @@ const CronJobs = () => {
-
+
e.stopPropagation()}>
handleRunJob(job.id)}
@@ -305,6 +341,16 @@ const CronJobs = () => {
)}
+
openEditModal(job)}
+ title="Edit"
+ >
+
+
+
+
+
handleToggleJob(job.id, job.enabled)}
@@ -339,15 +385,15 @@ const CronJobs = () => {
- {/* Create Job Modal */}
- {showCreateModal && (
-
setShowCreateModal(false)}>
+ {/* Create/Edit Job Modal */}
+ {showJobModal && (
+
e.stopPropagation()}>
-
Create Cron Job
- setShowCreateModal(false)}>×
+ {editingJob ? 'Edit Cron Job' : 'Create Cron Job'}
+ ×
-
)}
+
+ {/* Run Output Modal */}
+ {runOutput && (
+
setRunOutput(null)}>
+
e.stopPropagation()}>
+
+
Run Output: {runOutput.jobName}
+ setRunOutput(null)}>×
+
+
+
+
+ Exit Code
+
+ {runOutput.exitCode}
+
+
+ {runOutput.stdout && (
+
+
stdout
+
{runOutput.stdout}
+
+ )}
+ {runOutput.stderr && (
+
+
stderr
+
{runOutput.stderr}
+
+ )}
+ {!runOutput.stdout && !runOutput.stderr && (
+
No output produced.
+ )}
+
+
+
+ setRunOutput(null)}>Close
+
+
+
+ )}
);
};
diff --git a/frontend/src/pages/Databases.jsx b/frontend/src/pages/Databases.jsx
index 6a756fc..7b089d6 100644
--- a/frontend/src/pages/Databases.jsx
+++ b/frontend/src/pages/Databases.jsx
@@ -1,10 +1,15 @@
import React, { useState, useEffect } from 'react';
+import { useParams } from 'react-router-dom';
+import useTabParam from '../hooks/useTabParam';
import api from '../services/api';
import { useToast } from '../contexts/ToastContext';
import QueryRunner from '../components/QueryRunner';
+const VALID_TABS = ['mysql', 'postgresql', 'docker', 'backups', 'sqlite'];
+
const Databases = () => {
- const [activeTab, setActiveTab] = useState('mysql');
+ const { tab } = useParams();
+ const [activeTab, setActiveTab] = useTabParam('/databases', VALID_TABS);
const [status, setStatus] = useState(null);
const [loading, setLoading] = useState(true);
@@ -18,7 +23,7 @@ const Databases = () => {
setStatus(data);
// Default to available server
- if (!data.mysql.running && data.postgresql.running) {
+ if (!tab && !data.mysql.running && data.postgresql.running) {
setActiveTab('postgresql');
}
} catch (err) {
diff --git a/frontend/src/pages/Docker.jsx b/frontend/src/pages/Docker.jsx
index 7d4ae1a..3b7c353 100644
--- a/frontend/src/pages/Docker.jsx
+++ b/frontend/src/pages/Docker.jsx
@@ -1,4 +1,5 @@
import React, { useState, useEffect, useCallback, createContext, useContext } from 'react';
+import useTabParam from '../hooks/useTabParam';
import api from '../services/api';
import { useToast } from '../contexts/ToastContext';
@@ -6,8 +7,10 @@ import { useToast } from '../contexts/ToastContext';
const ServerContext = createContext({ serverId: 'local', serverName: 'Local' });
const useServer = () => useContext(ServerContext);
+const VALID_TABS = ['containers', 'compose', 'images', 'volumes', 'networks'];
+
const Docker = () => {
- const [activeTab, setActiveTab] = useState('containers');
+ const [activeTab, setActiveTab] = useTabParam('/docker', VALID_TABS);
const [dockerStatus, setDockerStatus] = useState(null);
const [loading, setLoading] = useState(true);
const [servers, setServers] = useState([]);
diff --git a/frontend/src/pages/FTPServer.jsx b/frontend/src/pages/FTPServer.jsx
index 53ae74a..c6cb604 100644
--- a/frontend/src/pages/FTPServer.jsx
+++ b/frontend/src/pages/FTPServer.jsx
@@ -1,9 +1,12 @@
import { useState, useEffect } from 'react';
+import useTabParam from '../hooks/useTabParam';
import { api } from '../services/api';
import { useToast } from '../contexts/ToastContext';
import Spinner from '../components/Spinner';
import ConfirmDialog from '../components/ConfirmDialog';
+const VALID_TABS = ['overview', 'users', 'connections', 'logs'];
+
function FTPServer() {
const [status, setStatus] = useState(null);
const [users, setUsers] = useState([]);
@@ -11,7 +14,7 @@ function FTPServer() {
const [config, setConfig] = useState(null);
const [logs, setLogs] = useState('');
const [loading, setLoading] = useState(true);
- const [activeTab, setActiveTab] = useState('overview');
+ const [activeTab, setActiveTab] = useTabParam('/ftp', VALID_TABS);
const [showUserModal, setShowUserModal] = useState(false);
const [showPasswordModal, setShowPasswordModal] = useState(false);
const [showInstallModal, setShowInstallModal] = useState(false);
diff --git a/frontend/src/pages/Git.jsx b/frontend/src/pages/Git.jsx
index 2b65a69..c65d492 100644
--- a/frontend/src/pages/Git.jsx
+++ b/frontend/src/pages/Git.jsx
@@ -1,13 +1,16 @@
import { useState, useEffect } from 'react';
+import useTabParam from '../hooks/useTabParam';
import { api } from '../services/api';
import { useToast } from '../contexts/ToastContext';
import Spinner from '../components/Spinner';
import ConfirmDialog from '../components/ConfirmDialog';
+const VALID_TABS = ['overview', 'repositories', 'access', 'webhooks', 'deployments', 'settings'];
+
function Git() {
const [status, setStatus] = useState(null);
const [loading, setLoading] = useState(true);
- const [activeTab, setActiveTab] = useState('overview');
+ const [activeTab, setActiveTab] = useTabParam('/git', VALID_TABS);
const [showInstallModal, setShowInstallModal] = useState(false);
const [actionLoading, setActionLoading] = useState(false);
const [confirmDialog, setConfirmDialog] = useState(null);
diff --git a/frontend/src/pages/Monitoring.jsx b/frontend/src/pages/Monitoring.jsx
index 4acd0cf..48cb30a 100644
--- a/frontend/src/pages/Monitoring.jsx
+++ b/frontend/src/pages/Monitoring.jsx
@@ -1,7 +1,10 @@
import React, { useState, useEffect } from 'react';
+import useTabParam from '../hooks/useTabParam';
import api from '../services/api';
import { useToast } from '../contexts/ToastContext';
+const VALID_TABS = ['overview', 'alerts', 'config', 'thresholds'];
+
const Monitoring = () => {
const toast = useToast();
const [status, setStatus] = useState(null);
@@ -10,7 +13,7 @@ const Monitoring = () => {
const [alertHistory, setAlertHistory] = useState([]);
const [loading, setLoading] = useState(true);
const [error, setError] = useState(null);
- const [activeTab, setActiveTab] = useState('overview');
+ const [activeTab, setActiveTab] = useTabParam('/monitoring', VALID_TABS);
// Config form state
const [configForm, setConfigForm] = useState({
diff --git a/frontend/src/pages/Security.jsx b/frontend/src/pages/Security.jsx
index 43fab09..32759e3 100644
--- a/frontend/src/pages/Security.jsx
+++ b/frontend/src/pages/Security.jsx
@@ -1,12 +1,15 @@
import React, { useState, useEffect, useCallback } from 'react';
import { useAuth } from '../contexts/AuthContext';
+import useTabParam from '../hooks/useTabParam';
import api from '../services/api';
import ConfirmDialog from '../components/ConfirmDialog';
import { useToast } from '../contexts/ToastContext';
+const VALID_TABS = ['overview', 'firewall', 'fail2ban', 'ssh-keys', 'ip-lists', 'scanner', 'quarantine', 'integrity', 'audit', 'vulnerability', 'updates', 'events', 'settings'];
+
const Security = () => {
const { isAdmin } = useAuth();
- const [activeTab, setActiveTab] = useState('overview');
+ const [activeTab, setActiveTab] = useTabParam('/security', VALID_TABS);
const [status, setStatus] = useState(null);
const [loading, setLoading] = useState(true);
diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx
index 8bfe553..0de38ae 100644
--- a/frontend/src/pages/Settings.jsx
+++ b/frontend/src/pages/Settings.jsx
@@ -1,4 +1,5 @@
import React, { useState, useEffect } from 'react';
+import useTabParam from '../hooks/useTabParam';
import { useAuth } from '../contexts/AuthContext';
import { useTheme } from '../contexts/ThemeContext';
import api from '../services/api';
@@ -18,8 +19,10 @@ import {
} from 'lucide-react';
import ServerKitLogo from '../assets/ServerKitLogo.svg';
+const VALID_TABS = ['profile', 'security', 'appearance', 'notifications', 'system', 'users', 'audit', 'site', 'developer', 'about'];
+
const Settings = () => {
- const [activeTab, setActiveTab] = useState('profile');
+ const [activeTab, setActiveTab] = useTabParam('/settings', VALID_TABS);
const { isAdmin } = useAuth();
const [devMode, setDevMode] = useState(false);
diff --git a/frontend/src/pages/Terminal.jsx b/frontend/src/pages/Terminal.jsx
index e0fb279..967e77e 100644
--- a/frontend/src/pages/Terminal.jsx
+++ b/frontend/src/pages/Terminal.jsx
@@ -1,9 +1,12 @@
import React, { useState, useEffect, useRef } from 'react';
+import useTabParam from '../hooks/useTabParam';
import api from '../services/api';
import { useToast } from '../contexts/ToastContext';
+const VALID_TABS = ['logs', 'journal', 'processes', 'services'];
+
const Terminal = () => {
- const [activeTab, setActiveTab] = useState('logs');
+ const [activeTab, setActiveTab] = useTabParam('/terminal', VALID_TABS);
return (
diff --git a/frontend/src/pages/WordPressDetail.jsx b/frontend/src/pages/WordPressDetail.jsx
index 6f6da15..194f119 100644
--- a/frontend/src/pages/WordPressDetail.jsx
+++ b/frontend/src/pages/WordPressDetail.jsx
@@ -1,6 +1,7 @@
import React, { useState, useEffect } from 'react';
import { useParams, useNavigate, Link } from 'react-router-dom';
import { ExternalLink, Settings, RefreshCw, Plus, Database, GitBranch, Package, Palette, Archive } from 'lucide-react';
+import useTabParam from '../hooks/useTabParam';
import wordpressApi from '../services/wordpress';
import { useToast } from '../contexts/ToastContext';
import { EnvironmentCard, SnapshotTable, GitConnectForm, CommitList } from '../components/wordpress';
@@ -59,13 +60,15 @@ const DetailPageSkeleton = () => (
);
+const VALID_TABS = ['overview', 'environments', 'database', 'plugins', 'themes', 'git', 'backups'];
+
const WordPressDetail = () => {
const { id } = useParams();
const navigate = useNavigate();
const toast = useToast();
const [site, setSite] = useState(null);
const [loading, setLoading] = useState(true);
- const [activeTab, setActiveTab] = useState('overview');
+ const [activeTab, setActiveTab] = useTabParam(`/wordpress/${id}`, VALID_TABS);
useEffect(() => {
loadSite();
diff --git a/frontend/src/pages/WordPressProject.jsx b/frontend/src/pages/WordPressProject.jsx
index 197bad2..e96e8b6 100644
--- a/frontend/src/pages/WordPressProject.jsx
+++ b/frontend/src/pages/WordPressProject.jsx
@@ -1,6 +1,7 @@
import React, { useState, useEffect, useCallback, useRef } from 'react';
import { useParams, useNavigate, Link } from 'react-router-dom';
import { Plus, ExternalLink, RefreshCw, GitBranch, Search, Shield, Activity, Settings } from 'lucide-react';
+import useTabParam from '../hooks/useTabParam';
import wordpressApi from '../services/wordpress';
import { useToast } from '../contexts/ToastContext';
import {
@@ -24,6 +25,8 @@ import { io } from 'socket.io-client';
const SOCKET_URL = import.meta.env.VITE_API_URL?.replace('/api/v1', '') || 'http://localhost:5000';
+const VALID_TABS = ['pipeline', 'activity', 'health'];
+
const WordPressProject = () => {
const { id } = useParams();
const navigate = useNavigate();
@@ -31,7 +34,7 @@ const WordPressProject = () => {
const [pipeline, setPipeline] = useState(null);
const [loading, setLoading] = useState(true);
- const [activeTab, setActiveTab] = useState('pipeline');
+ const [activeTab, setActiveTab] = useTabParam(`/wordpress/projects/${id}`, VALID_TABS);
const [showCreateEnvModal, setShowCreateEnvModal] = useState(false);
const [promoteModal, setPromoteModal] = useState(null);
const [syncModal, setSyncModal] = useState(null);
diff --git a/frontend/src/services/api.js b/frontend/src/services/api.js
index b036feb..59891f4 100644
--- a/frontend/src/services/api.js
+++ b/frontend/src/services/api.js
@@ -2044,6 +2044,13 @@ class ApiService {
});
}
+ async updateCronJob(jobId, data) {
+ return this.request(`/cron/jobs/${jobId}`, {
+ method: 'PUT',
+ body: data
+ });
+ }
+
async deleteCronJob(jobId) {
return this.request(`/cron/jobs/${jobId}`, { method: 'DELETE' });
}
diff --git a/frontend/src/styles/pages/_cron.less b/frontend/src/styles/pages/_cron.less
index 8e36f2e..80f17b4 100644
--- a/frontend/src/styles/pages/_cron.less
+++ b/frontend/src/styles/pages/_cron.less
@@ -19,6 +19,15 @@
}
}
}
+
+ // Global SVG rule for buttons
+ .btn svg {
+ stroke: currentColor;
+ fill: none;
+ stroke-width: 2;
+ stroke-linecap: round;
+ stroke-linejoin: round;
+ }
}
// Stats icons
@@ -50,7 +59,12 @@
.cron-item {
.list-item-base();
- transition: opacity 0.2s ease;
+ transition: opacity 0.2s ease, background-color 0.15s ease;
+ cursor: pointer;
+
+ &:hover {
+ background: @bg-hover;
+ }
&.disabled {
opacity: 0.6;
@@ -163,6 +177,54 @@
}
}
+// Run output display
+.run-output {
+ display: flex;
+ flex-direction: column;
+ gap: @space-4;
+}
+
+.run-output-exit {
+ display: flex;
+ align-items: center;
+ gap: @space-3;
+}
+
+.run-output-label {
+ font-size: @font-size-sm;
+ font-weight: @font-weight-semibold;
+ color: @text-secondary;
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+}
+
+.run-output-section {
+ display: flex;
+ flex-direction: column;
+ gap: @space-2;
+}
+
+.run-output-pre {
+ font-family: @font-mono;
+ font-size: @font-size-xs;
+ background: @bg-hover;
+ color: @text-primary;
+ padding: @space-3;
+ border-radius: @radius-md;
+ border: 1px solid @border-subtle;
+ max-height: 300px;
+ overflow: auto;
+ white-space: pre-wrap;
+ word-break: break-all;
+ margin: 0;
+
+ &--error {
+ color: @danger;
+ border-color: fade(@danger, 20%);
+ background: fade(@danger, 5%);
+ }
+}
+
// Spinner inline for running jobs
.spinner-inline {
display: inline-block;
From c4591c405ecc091d54b5b2bc9c9c9bf96691b61c Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Tue, 3 Mar 2026 22:31:27 -0500
Subject: [PATCH 05/18] Ignore virtual mounts, dedupe devices; tweak sidebar
Backend: filter out virtual/pseudo filesystem types and skip noisy mount prefixes (e.g. /snap/, /var/lib/docker/, /run/), and deduplicate mounts by device so only a single mount per device is reported for disk usage. Frontend: make the file manager sidebar scrollable with a max-height (calc(100vh - 120px)), disable that height in the small-screen modal, and remove max-height restriction for the last child to prevent clipping.
---
backend/app/services/file_service.py | 25 +++++++++++++++++++-
frontend/src/styles/pages/_file-manager.less | 8 ++++++-
2 files changed, 31 insertions(+), 2 deletions(-)
diff --git a/backend/app/services/file_service.py b/backend/app/services/file_service.py
index 051a2e0..ab02a7f 100644
--- a/backend/app/services/file_service.py
+++ b/backend/app/services/file_service.py
@@ -483,14 +483,37 @@ def _format_size(size: int) -> str:
size /= 1024
return f"{size:.1f} PB"
+ # Virtual/pseudo filesystem types to hide from disk usage
+ _VIRTUAL_FSTYPES = {
+ 'squashfs', 'tmpfs', 'devtmpfs', 'devfs', 'overlay', 'aufs',
+ 'proc', 'sysfs', 'cgroup', 'cgroup2', 'debugfs', 'tracefs',
+ 'securityfs', 'pstore', 'efivarfs', 'bpf', 'fusectl',
+ 'configfs', 'hugetlbfs', 'mqueue', 'ramfs', 'nsfs',
+ }
+
+ # Mount-point prefixes that are always noise
+ _SKIP_MOUNT_PREFIXES = ('/snap/', '/var/lib/docker/', '/run/')
+
@classmethod
def get_all_disk_mounts(cls) -> Dict:
- """Get disk usage for all mount points."""
+ """Get disk usage for all physical mount points, deduplicated by device."""
try:
partitions = psutil.disk_partitions(all=False)
mounts = []
+ seen_devices = set()
for partition in partitions:
+ # Skip virtual/pseudo filesystems
+ if partition.fstype in cls._VIRTUAL_FSTYPES:
+ continue
+ # Skip noisy mount prefixes (snaps, docker layers, etc.)
+ if any(partition.mountpoint.startswith(p) for p in cls._SKIP_MOUNT_PREFIXES):
+ continue
+ # Deduplicate: keep only the shortest mount path per device
+ if partition.device in seen_devices:
+ continue
+ seen_devices.add(partition.device)
+
try:
usage = psutil.disk_usage(partition.mountpoint)
mounts.append({
diff --git a/frontend/src/styles/pages/_file-manager.less b/frontend/src/styles/pages/_file-manager.less
index 37e282c..bc912ff 100644
--- a/frontend/src/styles/pages/_file-manager.less
+++ b/frontend/src/styles/pages/_file-manager.less
@@ -180,6 +180,8 @@
display: flex;
flex-direction: column;
gap: @space-4;
+ overflow-y: auto;
+ max-height: calc(100vh - 120px);
.transition(all);
@media (max-width: @breakpoint-lg) {
@@ -189,11 +191,11 @@
bottom: 0;
width: 100%;
max-width: 360px;
+ max-height: none;
background: @bg-body;
padding: @space-4;
z-index: @z-modal;
box-shadow: -4px 0 20px rgba(0, 0, 0, 0.3);
- overflow-y: auto;
}
}
@@ -207,6 +209,10 @@
background: @bg-card;
border-radius: @radius-lg;
overflow: hidden;
+
+ &:last-child {
+ max-height: none;
+ }
}
.sidebar-section-header {
From 7037ef345a209d2abfd81a3fe58ed8078730ee34 Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Tue, 3 Mar 2026 23:12:27 -0500
Subject: [PATCH 06/18] Run commands via privileged helpers; UI tweaks
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Backend: Replace direct sudo/subprocess.run calls with run_privileged and privileged_cmd, add _needs_sudo/privileged_cmd logic and is_command_available checks, and return clearer errors for missing binaries (journalctl/grep/tail/truncate/logrotate/systemctl/service). Add FileNotFoundError handling and use privileged_cmd for Popen.
Frontend: UI and style refinements across Downloads, Security and Terminal pages — simplified Linux icon, added release date and banner actions, redesigned scanner UI into interactive scan cards with custom path and toolbar, handle unavailable journalctl with an empty state, update SVG icons. Styles (.less): switch mono font to JetBrains Mono, adjust spacing/gaps, sidebar text truncation, revamp downloads and security card styles, and improve terminal log fonts, wrapping and processes table layout.
---
backend/app/services/log_service.py | 59 +++++-----
backend/app/services/process_service.py | 26 +++--
backend/app/utils/system.py | 51 ++++++---
frontend/src/pages/Downloads.jsx | 44 ++++++--
frontend/src/pages/Security.jsx | 130 +++++++++++++---------
frontend/src/pages/Terminal.jsx | 37 +++++-
frontend/src/styles/_variables.less | 2 +-
frontend/src/styles/components/_tabs.less | 2 +-
frontend/src/styles/layout/_sidebar.less | 3 +
frontend/src/styles/pages/_downloads.less | 113 +++++++++++++------
frontend/src/styles/pages/_security.less | 93 +++++++++++-----
frontend/src/styles/pages/_terminal.less | 80 ++++++++-----
12 files changed, 431 insertions(+), 209 deletions(-)
diff --git a/backend/app/services/log_service.py b/backend/app/services/log_service.py
index 441b4a9..4d31841 100644
--- a/backend/app/services/log_service.py
+++ b/backend/app/services/log_service.py
@@ -7,6 +7,7 @@
import queue
from app import paths
+from app.utils.system import run_privileged, privileged_cmd, is_command_available
class LogService:
@@ -81,22 +82,11 @@ def read_log(cls, filepath: str, lines: int = 100, from_end: bool = True) -> Dic
return {'success': False, 'error': 'Log file not found'}
try:
- if from_end:
- # Use tail to get last N lines
- result = subprocess.run(
- ['sudo', 'tail', '-n', str(lines), filepath],
- capture_output=True,
- text=True,
- timeout=30
- )
- else:
- # Use head to get first N lines
- result = subprocess.run(
- ['sudo', 'head', '-n', str(lines), filepath],
- capture_output=True,
- text=True,
- timeout=30
- )
+ tool = 'tail' if from_end else 'head'
+ result = run_privileged(
+ [tool, '-n', str(lines), filepath],
+ timeout=30
+ )
if result.returncode == 0:
log_lines = result.stdout.split('\n')
@@ -109,6 +99,8 @@ def read_log(cls, filepath: str, lines: int = 100, from_end: bool = True) -> Dic
else:
return {'success': False, 'error': result.stderr}
+ except FileNotFoundError:
+ return {'success': False, 'error': f'{tool} command not found'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -122,10 +114,8 @@ def search_log(cls, filepath: str, pattern: str, lines: int = 100) -> Dict:
return {'success': False, 'error': 'Log file not found'}
try:
- result = subprocess.run(
- ['sudo', 'grep', '-i', '-m', str(lines), pattern, filepath],
- capture_output=True,
- text=True,
+ result = run_privileged(
+ ['grep', '-i', '-m', str(lines), pattern, filepath],
timeout=60
)
@@ -141,6 +131,8 @@ def search_log(cls, filepath: str, pattern: str, lines: int = 100) -> Dict:
else:
return {'success': False, 'error': result.stderr}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'grep command not found'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -222,8 +214,11 @@ def get_docker_app_logs(cls, app_name: str, app_dir: str, lines: int = 100) -> D
def get_journalctl_logs(cls, unit: str = None, lines: int = 100,
since: str = None, priority: str = None) -> Dict:
"""Get logs from systemd journal."""
+ if not is_command_available('journalctl'):
+ return {'success': False, 'error': 'journalctl is not available on this system'}
+
try:
- cmd = ['sudo', 'journalctl', '-n', str(lines), '--no-pager', '-o', 'short-iso']
+ cmd = ['journalctl', '-n', str(lines), '--no-pager', '-o', 'short-iso']
if unit:
cmd.extend(['-u', unit])
@@ -232,7 +227,7 @@ def get_journalctl_logs(cls, unit: str = None, lines: int = 100,
if priority:
cmd.extend(['-p', priority])
- result = subprocess.run(cmd, capture_output=True, text=True, timeout=60)
+ result = run_privileged(cmd, timeout=60)
if result.returncode == 0:
log_lines = result.stdout.split('\n')
@@ -244,6 +239,8 @@ def get_journalctl_logs(cls, unit: str = None, lines: int = 100,
else:
return {'success': False, 'error': result.stderr}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'journalctl command not found'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -257,10 +254,8 @@ def clear_log(cls, filepath: str) -> Dict:
return {'success': False, 'error': 'Log file not found'}
try:
- result = subprocess.run(
- ['sudo', 'truncate', '-s', '0', filepath],
- capture_output=True,
- text=True
+ result = run_privileged(
+ ['truncate', '-s', '0', filepath]
)
if result.returncode == 0:
@@ -268,6 +263,8 @@ def clear_log(cls, filepath: str) -> Dict:
else:
return {'success': False, 'error': result.stderr}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'truncate command not found'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -275,10 +272,8 @@ def clear_log(cls, filepath: str) -> Dict:
def rotate_logs(cls) -> Dict:
"""Trigger log rotation."""
try:
- result = subprocess.run(
- ['sudo', 'logrotate', '-f', '/etc/logrotate.conf'],
- capture_output=True,
- text=True,
+ result = run_privileged(
+ ['logrotate', '-f', '/etc/logrotate.conf'],
timeout=120
)
@@ -286,6 +281,8 @@ def rotate_logs(cls) -> Dict:
'success': result.returncode == 0,
'message': 'Logs rotated' if result.returncode == 0 else result.stderr
}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'logrotate command not found'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -302,7 +299,7 @@ def tail_log(cls, filepath: str, callback, stop_event: threading.Event = None):
try:
process = subprocess.Popen(
- ['sudo', 'tail', '-f', '-n', '0', filepath],
+ privileged_cmd(['tail', '-f', '-n', '0', filepath]),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True
diff --git a/backend/app/services/process_service.py b/backend/app/services/process_service.py
index bb42443..af1da4b 100644
--- a/backend/app/services/process_service.py
+++ b/backend/app/services/process_service.py
@@ -3,6 +3,8 @@
import platform
from typing import List, Dict, Optional
+from app.utils.system import run_privileged, is_command_available
+
class ProcessService:
"""Service for process and service management."""
@@ -129,13 +131,15 @@ def control_service(cls, service_name: str, action: str) -> Dict:
try:
if system == 'Linux':
# Try systemctl first (systemd)
- cmd = ['sudo', 'systemctl', action, service_name]
- result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
+ result = run_privileged(
+ ['systemctl', action, service_name], timeout=30
+ )
if result.returncode != 0:
# Fall back to service command
- cmd = ['sudo', 'service', service_name, action]
- result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
+ result = run_privileged(
+ ['service', service_name, action], timeout=30
+ )
if result.returncode == 0:
return {'success': True, 'message': f'Service {service_name} {action} successful'}
@@ -158,6 +162,8 @@ def control_service(cls, service_name: str, action: str) -> Dict:
else:
return {'success': False, 'error': f'Unsupported platform: {system}'}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'systemctl/service command not found'}
except subprocess.TimeoutExpired:
return {'success': False, 'error': 'Command timed out'}
except Exception as e:
@@ -170,9 +176,13 @@ def get_service_logs(cls, service_name: str, lines: int = 100) -> Dict:
try:
if system == 'Linux':
- # Use journalctl for systemd services
- cmd = ['sudo', 'journalctl', '-u', service_name, '-n', str(lines), '--no-pager']
- result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
+ if not is_command_available('journalctl'):
+ return {'success': False, 'error': 'journalctl is not available on this system'}
+
+ result = run_privileged(
+ ['journalctl', '-u', service_name, '-n', str(lines), '--no-pager'],
+ timeout=30
+ )
if result.returncode == 0:
return {'success': True, 'logs': result.stdout}
@@ -182,5 +192,7 @@ def get_service_logs(cls, service_name: str, lines: int = 100) -> Dict:
else:
return {'success': False, 'error': 'Log retrieval not supported on this platform'}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'journalctl command not found'}
except Exception as e:
return {'success': False, 'error': str(e)}
diff --git a/backend/app/utils/system.py b/backend/app/utils/system.py
index 84231ab..1c58ea8 100644
--- a/backend/app/utils/system.py
+++ b/backend/app/utils/system.py
@@ -10,27 +10,50 @@
from typing import List, Optional, Union
+def _needs_sudo() -> bool:
+ """Return True if the current process should prepend sudo to commands.
+
+ Returns False when:
+ - Running on Windows (no sudo concept; dev environment)
+ - Already running as root (e.g. inside Docker)
+ - ``sudo`` is not installed (minimal containers)
+ """
+ if os.name == 'nt':
+ return False
+ if os.geteuid() == 0:
+ return False
+ if not shutil.which('sudo'):
+ return False
+ return True
+
+
+def privileged_cmd(cmd: Union[List[str], str]) -> Union[List[str], str]:
+ """Return *cmd* with ``sudo`` prepended when necessary.
+
+ Use this when you need the command list for ``Popen`` or other non-``run``
+ callers. For simple ``subprocess.run`` calls prefer :func:`run_privileged`.
+ """
+ if isinstance(cmd, str):
+ if _needs_sudo() and not cmd.lstrip().startswith('sudo '):
+ return f'sudo {cmd}'
+ return cmd
+
+ cmd = list(cmd)
+ if _needs_sudo() and cmd[0] != 'sudo':
+ return ['sudo'] + cmd
+ return cmd
+
+
def run_privileged(cmd: Union[List[str], str], **kwargs) -> subprocess.CompletedProcess:
"""Run a command with sudo if the current process is not root.
- Prepends ``sudo`` when ``os.geteuid() != 0`` and the command does not
- already start with ``sudo``. Defaults to ``capture_output=True, text=True``
- but callers can override any kwarg.
+ Prepends ``sudo`` only when needed (not root, not Windows, sudo exists).
+ Defaults to ``capture_output=True, text=True`` but callers can override.
Returns the raw ``CompletedProcess`` so services keep their existing
error-handling patterns.
"""
- if isinstance(cmd, str):
- # Shell-mode — caller is responsible for quoting
- needs_sudo = os.geteuid() != 0 and not cmd.lstrip().startswith('sudo ')
- if needs_sudo:
- cmd = f'sudo {cmd}'
- else:
- cmd = list(cmd)
- needs_sudo = os.geteuid() != 0 and cmd[0] != 'sudo'
- if needs_sudo:
- cmd = ['sudo'] + cmd
-
+ cmd = privileged_cmd(cmd)
kwargs.setdefault('capture_output', True)
kwargs.setdefault('text', True)
return subprocess.run(cmd, **kwargs)
diff --git a/frontend/src/pages/Downloads.jsx b/frontend/src/pages/Downloads.jsx
index 2f32f05..dca94cc 100644
--- a/frontend/src/pages/Downloads.jsx
+++ b/frontend/src/pages/Downloads.jsx
@@ -3,8 +3,23 @@ import api from '../services/api';
// Platform icons as SVG components
const LinuxIcon = () => (
-
-
+
+ {/* Tux body */}
+
+ {/* Head */}
+
+ {/* Left eye */}
+
+ {/* Right eye */}
+
+ {/* Beak */}
+
+ {/* Belly */}
+
+ {/* Left foot */}
+
+ {/* Right foot */}
+
);
@@ -163,19 +178,26 @@ function Downloads() {
Latest Version
v{versionInfo.version}
+ Released {new Date(versionInfo.published_at).toLocaleDateString()}
-
-
Released: {new Date(versionInfo.published_at).toLocaleDateString()}
+
@@ -196,12 +218,12 @@ function Downloads() {
key={platform.id}
className={`download-card ${!isAvailable ? 'unavailable' : ''}`}
>
-
+
-
-
{platform.name}
- {platform.arch}
-
+
+
+
{platform.name}
+ {platform.arch}
{
)}
-
-
-
Malware Scanner
-
- {updating ? 'Updating...' : 'Update Definitions'}
+
+
!isScanning && !scanning && handleStartScan('quick')}>
+
+
Quick Scan
+
Scan common web directories
+
{ e.stopPropagation(); handleStartScan('quick'); }}
+ disabled={isScanning || scanning}
+ >
+ Start Scan
-
-
-
-
handleStartScan('quick')}
- disabled={isScanning || scanning}
- >
-
-
-
- Quick Scan
-
-
Scan common web directories
-
-
- handleStartScan('full')}
- disabled={isScanning || scanning}
- >
-
-
-
-
-
- Full Scan
-
- Scan entire system (slow)
-
+
!isScanning && !scanning && handleStartScan('full')}>
+
+
+
+
+
+
+
+
Full Scan
+
Scan entire system (slow)
+
{ e.stopPropagation(); handleStartScan('full'); }}
+ disabled={isScanning || scanning}
+ >
+ Start Scan
+
+
-
-
Custom Path
-
- setScanPath(e.target.value)}
- placeholder="/path/to/scan"
- disabled={isScanning}
- />
- handleStartScan('custom')}
- disabled={isScanning || scanning || !scanPath}
- >
- Scan
-
-
-
+
+
+
Custom Path
+
Scan a specific directory
+
+ setScanPath(e.target.value)}
+ placeholder="/path/to/scan"
+ disabled={isScanning}
+ onClick={(e) => e.stopPropagation()}
+ />
+ handleStartScan('custom')}
+ disabled={isScanning || scanning || !scanPath}
+ >
+ Scan
+
+
+
+
+
+
+
+
+ {updating ? 'Updating...' : 'Update Definitions'}
+
+
+
{isScanning && (
@@ -1015,7 +1029,13 @@ const ScannerTab = () => {
{history.length === 0 ? (
-
No scan history available.
+
+
+
+
+
+
No scans have been run yet. Start a scan above to check for threats.
+
) : (
diff --git a/frontend/src/pages/Terminal.jsx b/frontend/src/pages/Terminal.jsx
index 967e77e..3e72290 100644
--- a/frontend/src/pages/Terminal.jsx
+++ b/frontend/src/pages/Terminal.jsx
@@ -199,7 +199,7 @@ const LogFilesTab = () => {
Log Files
-
+
@@ -317,6 +317,7 @@ const LogFilesTab = () => {
const JournalTab = () => {
const [logs, setLogs] = useState('');
const [loading, setLoading] = useState(false);
+ const [unavailable, setUnavailable] = useState(false);
const [unit, setUnit] = useState('');
const [lineCount, setLineCount] = useState(100);
const [priority, setPriority] = useState('');
@@ -327,11 +328,17 @@ const JournalTab = () => {
async function loadJournalLogs() {
setLoading(true);
+ setUnavailable(false);
try {
const data = await api.getJournalLogs(unit || null, lineCount);
setLogs(data.content || data.logs || 'No logs available');
} catch (err) {
- setLogs(`Error: ${err.message}`);
+ const msg = err.message || '';
+ if (msg.includes('not available') || msg.includes('not found')) {
+ setUnavailable(true);
+ } else {
+ setLogs(`Error: ${msg}`);
+ }
} finally {
setLoading(false);
}
@@ -341,6 +348,30 @@ const JournalTab = () => {
loadJournalLogs();
}, []);
+ if (unavailable) {
+ return (
+
+
+
+
+
+
+
+
+
System Journal Unavailable
+
+ journalctl is not available on this system.
+ This typically means the server is running without systemd
+ (e.g. a minimal Docker container or Windows dev environment).
+
+
+ Use the Log Files tab to browse available log files instead.
+
+
+
+ );
+ }
+
return (
@@ -701,7 +732,7 @@ const ServicesTab = () => {
-
+
diff --git a/frontend/src/styles/_variables.less b/frontend/src/styles/_variables.less
index 5aaf1a7..c83473f 100644
--- a/frontend/src/styles/_variables.less
+++ b/frontend/src/styles/_variables.less
@@ -103,7 +103,7 @@
// TYPOGRAPHY
// --------------------------------------------
@font-main: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
-@font-mono: 'Fira Code', 'Monaco', 'Consolas', monospace;
+@font-mono: 'JetBrains Mono', 'Fira Code', 'Monaco', 'Consolas', monospace;
@font-size-xs: 10px;
@font-size-sm: 12px;
diff --git a/frontend/src/styles/components/_tabs.less b/frontend/src/styles/components/_tabs.less
index fae8e50..34466a9 100644
--- a/frontend/src/styles/components/_tabs.less
+++ b/frontend/src/styles/components/_tabs.less
@@ -4,7 +4,7 @@
.tabs {
display: flex;
- gap: @space-1;
+ gap: @space-2;
border-bottom: 1px solid @border-subtle;
margin-bottom: @space-6;
overflow-x: auto;
diff --git a/frontend/src/styles/layout/_sidebar.less b/frontend/src/styles/layout/_sidebar.less
index 9936058..60494c7 100644
--- a/frontend/src/styles/layout/_sidebar.less
+++ b/frontend/src/styles/layout/_sidebar.less
@@ -414,6 +414,9 @@
transition: all 0.2s ease;
margin-bottom: 2px;
position: relative;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
// Pro Hover State
&:hover {
diff --git a/frontend/src/styles/pages/_downloads.less b/frontend/src/styles/pages/_downloads.less
index 28da154..b3d979b 100644
--- a/frontend/src/styles/pages/_downloads.less
+++ b/frontend/src/styles/pages/_downloads.less
@@ -76,12 +76,19 @@
.version-banner {
background: linear-gradient(135deg, @accent-primary 0%, #4f46e5 100%);
border-radius: @radius-lg;
- padding: @space-6;
+ padding: @space-6 @space-8;
margin-bottom: @space-8;
display: flex;
justify-content: space-between;
align-items: center;
- color: var(--text-primary);
+ color: @white;
+
+ @media (max-width: @breakpoint-md) {
+ flex-direction: column;
+ align-items: flex-start;
+ gap: @space-4;
+ padding: @space-5;
+ }
.version-info {
display: flex;
@@ -99,29 +106,57 @@
font-size: @font-size-3xl;
font-weight: 700;
}
+
+ .version-date {
+ font-size: @font-size-sm;
+ opacity: 0.75;
+ }
}
- .version-meta {
+ .version-actions {
display: flex;
- flex-direction: column;
- align-items: flex-end;
- gap: @space-2;
+ align-items: center;
+ gap: @space-3;
+ }
- span {
- font-size: @font-size-sm;
- opacity: 0.9;
+ .btn-banner-outline {
+ background: transparent;
+ border: 1px solid rgba(255, 255, 255, 0.4);
+ color: @white;
+ padding: @space-2 @space-4;
+ border-radius: @radius-md;
+ font-size: @font-size-sm;
+ cursor: pointer;
+ transition: all 0.2s;
+
+ &:hover {
+ background: rgba(255, 255, 255, 0.1);
+ border-color: rgba(255, 255, 255, 0.7);
}
+ }
- .release-notes-link {
- color: @white;
- text-decoration: underline;
- font-size: @font-size-sm;
- opacity: 0.9;
- transition: opacity 0.2s;
+ .btn-banner-primary {
+ background: @white;
+ color: @accent-primary;
+ border: none;
+ padding: @space-2 @space-5;
+ border-radius: @radius-md;
+ font-size: @font-size-sm;
+ font-weight: 600;
+ cursor: pointer;
+ display: flex;
+ align-items: center;
+ gap: @space-2;
+ transition: all 0.2s;
- &:hover {
- opacity: 1;
- }
+ svg {
+ width: 16px;
+ height: 16px;
+ }
+
+ &:hover {
+ background: rgba(255, 255, 255, 0.9);
+ transform: translateY(-1px);
}
}
}
@@ -161,10 +196,12 @@
background: @bg-card;
border: 1px solid @border-default;
border-radius: @radius-lg;
- padding: @space-5;
+ padding: @space-6;
display: flex;
flex-direction: column;
- gap: @space-4;
+ align-items: center;
+ text-align: center;
+ gap: @space-3;
transition: all 0.2s;
&:hover {
@@ -181,28 +218,33 @@
}
}
- .platform-header {
+ .platform-icon-wrapper {
+ width: 56px;
+ height: 56px;
display: flex;
align-items: center;
- gap: @space-3;
+ justify-content: center;
+ background: @bg-elevated;
+ border-radius: @radius-lg;
+ margin-bottom: @space-1;
.platform-icon {
- width: 40px;
- height: 40px;
+ width: 32px;
+ height: 32px;
color: @text-primary;
}
+ }
- .platform-info {
- h3 {
- font-size: @font-size-lg;
- font-weight: 600;
- margin: 0;
- }
+ .platform-info {
+ h3 {
+ font-size: @font-size-lg;
+ font-weight: 600;
+ margin: 0 0 @space-1;
+ }
- .platform-arch {
- font-size: @font-size-sm;
- color: @text-secondary;
- }
+ .platform-arch {
+ font-size: @font-size-sm;
+ color: @text-secondary;
}
}
@@ -212,6 +254,7 @@
justify-content: center;
gap: @space-2;
width: 100%;
+ margin-top: auto;
svg {
width: 18px;
@@ -335,7 +378,7 @@
justify-content: center;
font-weight: 600;
font-size: @font-size-sm;
- color: var(--text-primary);
+ color: @white;
}
.step-content {
diff --git a/frontend/src/styles/pages/_security.less b/frontend/src/styles/pages/_security.less
index c12645e..9c3e184 100644
--- a/frontend/src/styles/pages/_security.less
+++ b/frontend/src/styles/pages/_security.less
@@ -5,7 +5,7 @@
.security-page {
.tabs-nav {
display: flex;
- gap: @space-1;
+ gap: @space-2;
margin-bottom: @space-6;
border-bottom: 1px solid @border-subtle;
padding-bottom: @space-2;
@@ -110,55 +110,96 @@
.scanner-tab {
.scan-options {
display: grid;
- grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
- gap: @space-6;
- align-items: start;
+ grid-template-columns: repeat(3, 1fr);
+ gap: @space-4;
+ margin-bottom: @space-4;
+
+ @media (max-width: @breakpoint-md) {
+ grid-template-columns: 1fr;
+ }
}
- .scan-preset {
+ .scan-card {
display: flex;
flex-direction: column;
align-items: center;
- gap: @space-2;
text-align: center;
+ padding: @space-6 @space-4;
+ background: @bg-card;
+ border: 1px solid @border-subtle;
+ border-radius: @radius-lg;
+ cursor: pointer;
+ .transition(all);
- .btn-lg {
- display: flex;
- align-items: center;
- gap: @space-2;
- padding: @space-4 @space-8;
- font-size: @font-size-md;
+ &:hover {
+ border-color: @accent-primary;
+ background: @bg-elevated;
+ }
- svg {
- flex-shrink: 0;
- }
+ h4 {
+ margin: 0 0 @space-1;
+ font-size: @font-size-md;
+ font-weight: @font-weight-semibold;
}
.scan-desc {
color: @text-secondary;
font-size: @font-size-sm;
+ margin-bottom: @space-4;
+ }
+
+ .btn {
+ margin-top: auto;
}
}
- .scan-custom {
- label {
- display: block;
- margin-bottom: @space-2;
- font-weight: 500;
+ .scan-card-icon {
+ width: 48px;
+ height: 48px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ border-radius: @radius-md;
+ background: fade(@accent-primary, 10%);
+ color: @accent-primary;
+ margin-bottom: @space-3;
+ }
+
+ .scan-card--custom {
+ cursor: default;
+
+ &:hover {
+ border-color: @border-subtle;
+ background: @bg-card;
}
+ }
+
+ .scan-custom-input {
+ display: flex;
+ gap: @space-2;
+ width: 100%;
+ margin-top: auto;
- .input-group {
+ input {
+ flex: 1;
+ min-width: 0;
+ }
+ }
+
+ .scan-toolbar {
+ display: flex;
+ justify-content: flex-end;
+ margin-bottom: @space-4;
+
+ .btn {
display: flex;
+ align-items: center;
gap: @space-2;
-
- input {
- flex: 1;
- }
}
}
.scan-progress {
- margin-top: @space-6;
+ margin-bottom: @space-4;
border-color: @accent-primary;
.progress-info {
diff --git a/frontend/src/styles/pages/_terminal.less b/frontend/src/styles/pages/_terminal.less
index 9e52c33..ebe91d0 100644
--- a/frontend/src/styles/pages/_terminal.less
+++ b/frontend/src/styles/pages/_terminal.less
@@ -245,17 +245,18 @@
.log-content {
flex: 1;
overflow: auto;
- padding: @space-4;
+ padding: 1rem;
background: var(--bg-code);
pre {
margin: 0;
- font-family: 'JetBrains Mono', 'Fira Code', monospace;
- font-size: 12px;
- line-height: 1.6;
+ font-family: @font-mono;
+ font-size: @font-size-sm;
+ line-height: 1.4;
color: var(--text-code);
white-space: pre-wrap;
- word-break: break-all;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
}
.empty-viewer {
@@ -362,16 +363,17 @@
background: var(--bg-code);
border-radius: @radius-lg;
overflow: auto;
- padding: @space-4;
+ padding: 1rem;
pre {
margin: 0;
- font-family: 'JetBrains Mono', 'Fira Code', monospace;
- font-size: 12px;
- line-height: 1.6;
+ font-family: @font-mono;
+ font-size: @font-size-sm;
+ line-height: 1.4;
color: var(--text-code);
white-space: pre-wrap;
- word-break: break-all;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
}
}
@@ -418,14 +420,40 @@
.processes-table {
width: 100%;
min-width: 800px;
+ table-layout: fixed;
+ border-collapse: collapse;
+
+ // Column widths: PID | Name | User | CPU% | Mem% | Memory | Status | Actions
+ th, td {
+ padding: @space-2 @space-3;
+ text-align: left;
+ vertical-align: middle;
+ font-size: @font-size-sm;
+ }
th {
position: sticky;
top: 0;
background: var(--bg-secondary);
z-index: 1;
+ font-weight: @font-weight-semibold;
+ color: @text-secondary;
+ font-size: @font-size-xs;
+ text-transform: uppercase;
+ letter-spacing: 0.03em;
+ border-bottom: 1px solid var(--border-color);
}
+ // Explicit column widths for alignment
+ th:nth-child(1), td:nth-child(1) { width: 70px; } // PID
+ th:nth-child(2), td:nth-child(2) { width: auto; } // Name (flexible)
+ th:nth-child(3), td:nth-child(3) { width: 90px; } // User
+ th:nth-child(4), td:nth-child(4) { width: 110px; } // CPU %
+ th:nth-child(5), td:nth-child(5) { width: 110px; } // Memory %
+ th:nth-child(6), td:nth-child(6) { width: 90px; } // Memory
+ th:nth-child(7), td:nth-child(7) { width: 80px; } // Status
+ th:nth-child(8), td:nth-child(8) { width: 80px; } // Actions
+
tr {
cursor: pointer;
transition: background 0.15s ease;
@@ -439,13 +467,16 @@
}
}
+ tbody tr {
+ border-bottom: 1px solid var(--border-color);
+ }
+
.mono {
- font-family: 'JetBrains Mono', monospace;
+ font-family: @font-mono;
font-size: @font-size-xs;
}
.process-name {
- max-width: 200px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
@@ -453,16 +484,14 @@
.usage-cell {
position: relative;
- min-width: 100px;
.usage-bar {
position: absolute;
left: 0;
- top: 50%;
- transform: translateY(-50%);
- height: 4px;
+ bottom: 0;
+ height: 3px;
border-radius: 2px;
- opacity: 0.3;
+ opacity: 0.4;
&.cpu {
background: @accent-primary;
@@ -476,7 +505,7 @@
span {
position: relative;
font-size: @font-size-xs;
- font-family: monospace;
+ font-family: @font-mono;
}
}
@@ -542,7 +571,7 @@
font-weight: 500;
&.mono {
- font-family: monospace;
+ font-family: @font-mono;
}
}
}
@@ -564,7 +593,7 @@
padding: @space-2;
background: var(--bg-tertiary);
border-radius: @radius-sm;
- font-family: monospace;
+ font-family: @font-mono;
font-size: @font-size-xs;
word-break: break-all;
}
@@ -687,15 +716,16 @@
overflow: auto;
background: var(--bg-code);
border-radius: @radius-md;
- padding: @space-4;
+ padding: 1rem;
pre {
margin: 0;
- font-family: 'JetBrains Mono', monospace;
- font-size: 12px;
- line-height: 1.6;
+ font-family: @font-mono;
+ font-size: @font-size-sm;
+ line-height: 1.4;
color: var(--text-code);
white-space: pre-wrap;
- word-break: break-all;
+ word-wrap: break-word;
+ overflow-wrap: break-word;
}
}
From 3491e888777426d3397aa7baf2e1a91d5647c97e Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Wed, 4 Mar 2026 00:02:52 -0500
Subject: [PATCH 07/18] Add log fallbacks, privilege helpers, and checks
Introduce safer system interactions and improved logging fallbacks across services. Replace direct sudo/subprocess calls with run_privileged/privileged_cmd, add command availability checks (is_command_available) and Linux guards for user management. Enhance LogService with fallback readers (Python file I/O, grep fallback, syslog and Windows Event Log support) and standardize responses via sourced_result. Update frontend Terminal to display log source and adjust UI when journalctl is unavailable. Misc: delegate ProcessService log retrieval to LogService, add PackageManager usage in PHP/SSL installs, and add warning logging in DeploymentService when diff generation fails.
---
backend/app/services/backup_service.py | 11 ++
backend/app/services/deployment_service.py | 8 +-
backend/app/services/ftp_service.py | 48 +++--
backend/app/services/log_service.py | 219 +++++++++++++++------
backend/app/services/nginx_service.py | 5 +-
backend/app/services/php_service.py | 42 ++--
backend/app/services/process_service.py | 30 +--
backend/app/services/ssl_service.py | 29 +--
backend/app/services/system_service.py | 20 +-
backend/app/services/wordpress_service.py | 64 +++---
backend/app/utils/system.py | 15 ++
frontend/src/pages/Terminal.jsx | 89 +++++----
frontend/src/styles/pages/_terminal.less | 16 ++
13 files changed, 377 insertions(+), 219 deletions(-)
diff --git a/backend/app/services/backup_service.py b/backend/app/services/backup_service.py
index 5104c9d..32f1d69 100644
--- a/backend/app/services/backup_service.py
+++ b/backend/app/services/backup_service.py
@@ -13,6 +13,7 @@
import schedule
from app import paths
+from app.utils.system import is_command_available
class BackupService:
@@ -145,6 +146,8 @@ def _backup_database_internal(cls, db_type: str, db_name: str,
try:
if db_type == 'mysql':
+ if not is_command_available('mysqldump'):
+ return {'success': False, 'error': 'mysqldump not installed'}
cmd = ['mysqldump']
if config.get('user'):
cmd.extend(['-u', config['user']])
@@ -161,6 +164,9 @@ def _backup_database_internal(cls, db_type: str, db_name: str,
return {'success': False, 'error': result.stderr}
elif db_type == 'postgresql':
+ if not is_command_available('pg_dump'):
+ return {'success': False, 'error': 'pg_dump not installed'}
+
env = os.environ.copy()
if config.get('password'):
env['PGPASSWORD'] = config['password']
@@ -335,6 +341,8 @@ def restore_database(cls, backup_path: str, db_type: str, db_name: str,
try:
if db_type == 'mysql':
+ if not is_command_available('mysql'):
+ return {'success': False, 'error': 'mysql client not installed'}
cmd = ['mysql']
if user:
cmd.extend(['-u', user])
@@ -348,6 +356,9 @@ def restore_database(cls, backup_path: str, db_type: str, db_name: str,
result = subprocess.run(cmd, stdin=f, capture_output=True, text=True)
elif db_type == 'postgresql':
+ if not is_command_available('psql'):
+ return {'success': False, 'error': 'psql client not installed'}
+
env = os.environ.copy()
if password:
env['PGPASSWORD'] = password
diff --git a/backend/app/services/deployment_service.py b/backend/app/services/deployment_service.py
index 29b21d6..4f9f69f 100644
--- a/backend/app/services/deployment_service.py
+++ b/backend/app/services/deployment_service.py
@@ -9,6 +9,7 @@
- Diff generation between deployments
"""
+import logging
import os
import subprocess
import json
@@ -26,6 +27,9 @@
from app import paths
+logger = logging.getLogger(__name__)
+
+
class DeploymentService:
"""Service for orchestrating deployments."""
@@ -543,8 +547,8 @@ def _generate_diff(cls, deployment: Deployment) -> None:
db.session.add(diff)
db.session.commit()
- except Exception:
- pass
+ except Exception as e:
+ logger.warning('Failed to generate deployment diff: %s', e)
@classmethod
def get_deployments(cls, app_id: int, limit: int = 20, offset: int = 0) -> List[Dict]:
diff --git a/backend/app/services/ftp_service.py b/backend/app/services/ftp_service.py
index ac29575..90df625 100644
--- a/backend/app/services/ftp_service.py
+++ b/backend/app/services/ftp_service.py
@@ -4,7 +4,7 @@
import subprocess
import re
-from app.utils.system import PackageManager, ServiceControl, run_privileged
+from app.utils.system import PackageManager, ServiceControl, run_privileged, privileged_cmd
try:
import pwd
except ImportError:
@@ -285,6 +285,9 @@ def _update_proftpd_config(cls, settings: Dict) -> Dict:
@classmethod
def list_users(cls) -> Dict:
"""List FTP users."""
+ if pwd is None:
+ return {'success': False, 'error': 'User management requires Linux'}
+
try:
users = []
@@ -340,6 +343,9 @@ def list_users(cls) -> Dict:
@classmethod
def create_user(cls, username: str, password: str = None, home_dir: str = None) -> Dict:
"""Create a new FTP user."""
+ if pwd is None:
+ return {'success': False, 'error': 'User management requires Linux'}
+
# Validate username
if not re.match(r'^[a-z][a-z0-9_-]{2,31}$', username):
return {'success': False, 'error': 'Invalid username. Use lowercase letters, numbers, underscore, hyphen. 3-32 chars.'}
@@ -361,26 +367,29 @@ def create_user(cls, username: str, password: str = None, home_dir: str = None)
try:
# Create user with restricted shell
- result = subprocess.run([
- 'sudo', 'useradd',
+ result = run_privileged([
+ 'useradd',
'-m', # Create home directory
'-d', home_dir,
'-s', '/usr/sbin/nologin', # No shell access
'-c', f'FTP User {username}',
username
- ], capture_output=True, text=True)
+ ])
if result.returncode != 0:
return {'success': False, 'error': result.stderr or 'Failed to create user'}
# Set password
proc = subprocess.Popen(
- ['sudo', 'chpasswd'],
+ privileged_cmd(['chpasswd']),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
- proc.communicate(input=f'{username}:{password}'.encode())
+ stdout, stderr = proc.communicate(input=f'{username}:{password}'.encode())
+
+ if proc.returncode != 0:
+ return {'success': False, 'error': stderr.decode() or 'Failed to set password'}
# Add to vsftpd userlist if it exists
if os.path.exists(cls.VSFTPD_USER_LIST):
@@ -388,7 +397,7 @@ def create_user(cls, username: str, password: str = None, home_dir: str = None)
f.write(f'{username}\n')
# Set proper permissions on home directory
- subprocess.run(['sudo', 'chmod', '755', home_dir], capture_output=True)
+ run_privileged(['chmod', '755', home_dir])
return {
'success': True,
@@ -404,6 +413,9 @@ def create_user(cls, username: str, password: str = None, home_dir: str = None)
@classmethod
def delete_user(cls, username: str, delete_home: bool = False) -> Dict:
"""Delete an FTP user."""
+ if pwd is None:
+ return {'success': False, 'error': 'User management requires Linux'}
+
try:
# Check if user exists
try:
@@ -412,12 +424,12 @@ def delete_user(cls, username: str, delete_home: bool = False) -> Dict:
return {'success': False, 'error': 'User not found'}
# Delete user
- cmd = ['sudo', 'userdel']
+ cmd = ['userdel']
if delete_home:
cmd.append('-r')
cmd.append(username)
- result = subprocess.run(cmd, capture_output=True, text=True)
+ result = run_privileged(cmd)
if result.returncode != 0:
return {'success': False, 'error': result.stderr or 'Failed to delete user'}
@@ -437,6 +449,9 @@ def delete_user(cls, username: str, delete_home: bool = False) -> Dict:
@classmethod
def change_password(cls, username: str, new_password: str = None) -> Dict:
"""Change FTP user password."""
+ if pwd is None:
+ return {'success': False, 'error': 'User management requires Linux'}
+
try:
# Check if user exists
try:
@@ -450,7 +465,7 @@ def change_password(cls, username: str, new_password: str = None) -> Dict:
# Set password
proc = subprocess.Popen(
- ['sudo', 'chpasswd'],
+ privileged_cmd(['chpasswd']),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
@@ -537,6 +552,9 @@ def get_logs(cls, lines: int = 100) -> Dict:
@classmethod
def toggle_user(cls, username: str, enabled: bool) -> Dict:
"""Enable or disable an FTP user."""
+ if pwd is None:
+ return {'success': False, 'error': 'User management requires Linux'}
+
try:
# Check if user exists
try:
@@ -551,9 +569,8 @@ def toggle_user(cls, username: str, enabled: bool) -> Dict:
# Change shell to /bin/false to disable
shell = '/bin/false'
- result = subprocess.run(
- ['sudo', 'usermod', '-s', shell, username],
- capture_output=True, text=True
+ result = run_privileged(
+ ['usermod', '-s', shell, username]
)
if result.returncode != 0:
@@ -573,9 +590,8 @@ def toggle_user(cls, username: str, enabled: bool) -> Dict:
def disconnect_session(cls, pid: int) -> Dict:
"""Disconnect an active FTP session by PID."""
try:
- result = subprocess.run(
- ['sudo', 'kill', str(pid)],
- capture_output=True, text=True
+ result = run_privileged(
+ ['kill', str(pid)]
)
if result.returncode != 0:
diff --git a/backend/app/services/log_service.py b/backend/app/services/log_service.py
index 4d31841..43553fb 100644
--- a/backend/app/services/log_service.py
+++ b/backend/app/services/log_service.py
@@ -7,7 +7,7 @@
import queue
from app import paths
-from app.utils.system import run_privileged, privileged_cmd, is_command_available
+from app.utils.system import run_privileged, privileged_cmd, is_command_available, sourced_result
class LogService:
@@ -74,65 +74,116 @@ def get_log_files(cls) -> List[Dict]:
@classmethod
def read_log(cls, filepath: str, lines: int = 100, from_end: bool = True) -> Dict:
- """Read lines from a log file."""
+ """Read lines from a log file. Falls back to Python I/O when tail/head are unavailable."""
if not cls.is_path_allowed(filepath):
return {'success': False, 'error': 'Access denied: path not in allowed directories'}
if not os.path.exists(filepath):
return {'success': False, 'error': 'Log file not found'}
+ tool = 'tail' if from_end else 'head'
+
+ if is_command_available(tool):
+ try:
+ result = run_privileged(
+ [tool, '-n', str(lines), filepath],
+ timeout=30
+ )
+
+ if result.returncode == 0:
+ log_lines = result.stdout.split('\n')
+ return {**sourced_result(log_lines, tool, tool), 'filepath': filepath}
+ else:
+ return {'success': False, 'error': result.stderr}
+
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ # Fallback: Python file I/O
try:
- tool = 'tail' if from_end else 'head'
- result = run_privileged(
- [tool, '-n', str(lines), filepath],
- timeout=30
- )
+ with open(filepath, 'r', errors='replace') as f:
+ all_lines = f.readlines()
- if result.returncode == 0:
- log_lines = result.stdout.split('\n')
- return {
- 'success': True,
- 'lines': log_lines,
- 'count': len(log_lines),
- 'filepath': filepath
- }
+ if from_end:
+ log_lines = [l.rstrip('\n') for l in all_lines[-lines:]]
else:
- return {'success': False, 'error': result.stderr}
+ log_lines = [l.rstrip('\n') for l in all_lines[:lines]]
- except FileNotFoundError:
- return {'success': False, 'error': f'{tool} command not found'}
+ return {**sourced_result(log_lines, 'python', 'direct file read'), 'filepath': filepath}
+
+ except PermissionError:
+ return {'success': False, 'error': f'Permission denied reading {filepath}'}
except Exception as e:
return {'success': False, 'error': str(e)}
@classmethod
def search_log(cls, filepath: str, pattern: str, lines: int = 100) -> Dict:
- """Search log file for pattern."""
+ """Search log file for pattern. Falls back to Python regex when grep is unavailable."""
if not cls.is_path_allowed(filepath):
return {'success': False, 'error': 'Access denied: path not in allowed directories'}
if not os.path.exists(filepath):
return {'success': False, 'error': 'Log file not found'}
+ if is_command_available('grep'):
+ try:
+ result = run_privileged(
+ ['grep', '-i', '-m', str(lines), pattern, filepath],
+ timeout=60
+ )
+
+ # grep returns 1 if no matches (not an error)
+ if result.returncode in [0, 1]:
+ matches = result.stdout.split('\n') if result.stdout else []
+ return {
+ 'success': True,
+ 'matches': [m for m in matches if m],
+ 'count': len([m for m in matches if m]),
+ 'pattern': pattern
+ }
+ else:
+ return {'success': False, 'error': result.stderr}
+
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ # Fallback: Python regex search
+ import re
try:
- result = run_privileged(
- ['grep', '-i', '-m', str(lines), pattern, filepath],
- timeout=60
- )
+ regex = re.compile(pattern, re.IGNORECASE)
+ matches = []
+ with open(filepath, 'r', errors='replace') as f:
+ for line in f:
+ if regex.search(line):
+ matches.append(line.rstrip('\n'))
+ if len(matches) >= lines:
+ break
- # grep returns 1 if no matches (not an error)
- if result.returncode in [0, 1]:
- matches = result.stdout.split('\n') if result.stdout else []
- return {
- 'success': True,
- 'matches': [m for m in matches if m],
- 'count': len([m for m in matches if m]),
- 'pattern': pattern
- }
- else:
- return {'success': False, 'error': result.stderr}
+ return {
+ 'success': True,
+ 'matches': matches,
+ 'count': len(matches),
+ 'pattern': pattern
+ }
- except FileNotFoundError:
- return {'success': False, 'error': 'grep command not found'}
+ except re.error:
+ # Pattern might be a plain string, not valid regex — use substring match
+ matches = []
+ with open(filepath, 'r', errors='replace') as f:
+ for line in f:
+ if pattern.lower() in line.lower():
+ matches.append(line.rstrip('\n'))
+ if len(matches) >= lines:
+ break
+
+ return {
+ 'success': True,
+ 'matches': matches,
+ 'count': len(matches),
+ 'pattern': pattern
+ }
+ except PermissionError:
+ return {'success': False, 'error': f'Permission denied reading {filepath}'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -176,13 +227,7 @@ def get_docker_app_logs(cls, app_name: str, app_dir: str, lines: int = 100) -> D
if result.returncode == 0:
log_lines = result.stdout.split('\n') if result.stdout else []
- return {
- 'success': True,
- 'lines': log_lines,
- 'count': len(log_lines),
- 'source': 'docker',
- 'app_dir': app_dir
- }
+ return {**sourced_result(log_lines, 'docker', 'Docker Compose'), 'app_dir': app_dir}
else:
# Try with docker-compose (older syntax) as fallback
result = subprocess.run(
@@ -194,13 +239,7 @@ def get_docker_app_logs(cls, app_name: str, app_dir: str, lines: int = 100) -> D
)
if result.returncode == 0:
log_lines = result.stdout.split('\n') if result.stdout else []
- return {
- 'success': True,
- 'lines': log_lines,
- 'count': len(log_lines),
- 'source': 'docker',
- 'app_dir': app_dir
- }
+ return {**sourced_result(log_lines, 'docker', 'Docker Compose (legacy)'), 'app_dir': app_dir}
return {'success': False, 'error': result.stderr or 'Failed to get Docker logs'}
except FileNotFoundError:
@@ -213,10 +252,22 @@ def get_docker_app_logs(cls, app_name: str, app_dir: str, lines: int = 100) -> D
@classmethod
def get_journalctl_logs(cls, unit: str = None, lines: int = 100,
since: str = None, priority: str = None) -> Dict:
- """Get logs from systemd journal."""
- if not is_command_available('journalctl'):
- return {'success': False, 'error': 'journalctl is not available on this system'}
+ """Get system logs, trying journalctl → syslog → Windows Event Log."""
+ if is_command_available('journalctl'):
+ return cls._read_journalctl(unit, lines, since, priority)
+
+ syslog_path = cls._find_syslog()
+ if syslog_path:
+ return cls._read_syslog(syslog_path, unit, lines)
+
+ if os.name == 'nt':
+ return cls._read_windows_eventlog(lines)
+ return {'success': False, 'error': 'No system log source available — journalctl, syslog, and Windows Event Log are all unavailable'}
+
+ @classmethod
+ def _read_journalctl(cls, unit: str, lines: int, since: str, priority: str) -> Dict:
+ """Read logs from systemd journal."""
try:
cmd = ['journalctl', '-n', str(lines), '--no-pager', '-o', 'short-iso']
@@ -231,11 +282,7 @@ def get_journalctl_logs(cls, unit: str = None, lines: int = 100,
if result.returncode == 0:
log_lines = result.stdout.split('\n')
- return {
- 'success': True,
- 'lines': log_lines,
- 'count': len(log_lines)
- }
+ return sourced_result(log_lines, 'journalctl', 'systemd journal')
else:
return {'success': False, 'error': result.stderr}
@@ -244,6 +291,62 @@ def get_journalctl_logs(cls, unit: str = None, lines: int = 100,
except Exception as e:
return {'success': False, 'error': str(e)}
+ @staticmethod
+ def _find_syslog() -> Optional[str]:
+ """Return the first existing syslog path, or None."""
+ for path in ['/var/log/syslog', '/var/log/messages']:
+ if os.path.exists(path):
+ return path
+ return None
+
+ @classmethod
+ def _read_syslog(cls, filepath: str, service: str, lines: int) -> Dict:
+ """Read system logs from a syslog file, optionally filtering by service."""
+ try:
+ if service:
+ result = run_privileged(
+ ['bash', '-c', f'grep -i {subprocess.list2cmdline([service])} {subprocess.list2cmdline([filepath])} | tail -n {int(lines)}'],
+ timeout=60,
+ )
+ else:
+ result = run_privileged(
+ ['tail', '-n', str(lines), filepath],
+ timeout=60,
+ )
+
+ if result.returncode == 0 or (service and result.returncode == 1):
+ log_lines = result.stdout.split('\n') if result.stdout else []
+ return sourced_result(log_lines, 'syslog', filepath)
+ else:
+ return {'success': False, 'error': result.stderr}
+
+ except FileNotFoundError:
+ return {'success': False, 'error': 'Required commands not found'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @staticmethod
+ def _read_windows_eventlog(lines: int) -> Dict:
+ """Read system logs from Windows Event Log via wevtutil."""
+ try:
+ result = subprocess.run(
+ ['wevtutil', 'qe', 'System', f'/c:{int(lines)}', '/f:text', '/rd:true'],
+ capture_output=True,
+ text=True,
+ timeout=60,
+ )
+
+ if result.returncode == 0:
+ log_lines = result.stdout.split('\n') if result.stdout else []
+ return sourced_result(log_lines, 'eventlog', 'Windows Event Log')
+ else:
+ return {'success': False, 'error': result.stderr}
+
+ except FileNotFoundError:
+ return {'success': False, 'error': 'wevtutil command not found'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
@classmethod
def clear_log(cls, filepath: str) -> Dict:
"""Clear/truncate a log file."""
diff --git a/backend/app/services/nginx_service.py b/backend/app/services/nginx_service.py
index 88b2bda..3ec1aaa 100644
--- a/backend/app/services/nginx_service.py
+++ b/backend/app/services/nginx_service.py
@@ -4,7 +4,7 @@
from typing import Dict, List, Optional
from pathlib import Path
-from app.utils.system import ServiceControl, run_privileged
+from app.utils.system import ServiceControl, run_privileged, is_command_available
class NginxService:
@@ -293,6 +293,9 @@ class NginxService:
@classmethod
def test_config(cls) -> Dict:
"""Test Nginx configuration syntax."""
+ if not is_command_available('nginx'):
+ return {'success': False, 'error': 'nginx is not installed'}
+
try:
result = run_privileged([cls.NGINX_BIN, '-t'], timeout=30)
return {
diff --git a/backend/app/services/php_service.py b/backend/app/services/php_service.py
index 1832335..2351a30 100644
--- a/backend/app/services/php_service.py
+++ b/backend/app/services/php_service.py
@@ -4,7 +4,7 @@
from typing import Dict, List, Optional
from pathlib import Path
-from app.utils.system import PackageManager, ServiceControl, run_privileged
+from app.utils.system import PackageManager, ServiceControl, run_privileged, is_command_available
class PHPService:
@@ -77,7 +77,7 @@ def get_installed_versions(cls) -> List[Dict]:
timeout=10
)
full_version = result.stdout.split('\n')[0] if result.returncode == 0 else version
- except:
+ except Exception:
full_version = version
# Check if FPM is installed
@@ -88,7 +88,7 @@ def get_installed_versions(cls) -> List[Dict]:
if fpm_installed:
try:
fpm_running = ServiceControl.is_active(f'php{version}-fpm')
- except:
+ except Exception:
pass
versions.append({
@@ -116,7 +116,7 @@ def get_default_version(cls) -> Optional[str]:
match = re.search(r'PHP (\d+\.\d+)', result.stdout)
if match:
return match.group(1)
- except:
+ except Exception:
pass
return None
@@ -150,13 +150,17 @@ def install_version(cls, version: str) -> Dict:
return {'success': False, 'error': f'Unsupported PHP version: {version}'}
try:
- # Add PHP repository if needed
- run_privileged(
- ['add-apt-repository', '-y', 'ppa:ondrej/php'],
- timeout=120,
- )
-
- run_privileged(['apt-get', 'update'], timeout=120)
+ # Add PHP repository if needed (Ubuntu/Debian only)
+ if is_command_available('add-apt-repository'):
+ run_privileged(
+ ['add-apt-repository', '-y', 'ppa:ondrej/php'],
+ timeout=120,
+ )
+
+ # Update package lists (apt-specific, safe to skip on non-apt)
+ manager = PackageManager.detect()
+ if manager == 'apt':
+ run_privileged(['apt-get', 'update'], timeout=120)
# Install PHP and common extensions
packages = [
@@ -177,10 +181,7 @@ def install_version(cls, version: str) -> Dict:
f'php{version}-bcmath',
]
- result = run_privileged(
- ['apt-get', 'install', '-y'] + packages,
- timeout=600,
- )
+ result = PackageManager.install(packages, timeout=600)
if result.returncode == 0:
# Start FPM service
@@ -216,7 +217,7 @@ def get_extensions(cls, version: str) -> List[Dict]:
'name': ext,
'enabled': True
})
- except:
+ except Exception:
pass
return extensions
@@ -227,10 +228,7 @@ def install_extension(cls, version: str, extension: str) -> Dict:
package = f'php{version}-{extension}'
try:
- result = run_privileged(
- ['apt-get', 'install', '-y', package],
- timeout=120,
- )
+ result = PackageManager.install(package, timeout=120)
if result.returncode == 0:
# Restart FPM to load extension
@@ -263,7 +261,7 @@ def get_pools(cls, version: str) -> List[Dict]:
'pm': config.get('pm', 'dynamic'),
'max_children': config.get('pm.max_children', '5')
})
- except Exception as e:
+ except Exception:
pass
return pools
@@ -279,7 +277,7 @@ def _parse_pool_config(cls, filepath: str) -> Dict:
if line and not line.startswith(';') and '=' in line:
key, value = line.split('=', 1)
config[key.strip()] = value.strip()
- except:
+ except Exception:
pass
return config
diff --git a/backend/app/services/process_service.py b/backend/app/services/process_service.py
index af1da4b..7dfb920 100644
--- a/backend/app/services/process_service.py
+++ b/backend/app/services/process_service.py
@@ -3,7 +3,7 @@
import platform
from typing import List, Dict, Optional
-from app.utils.system import run_privileged, is_command_available
+from app.utils.system import run_privileged
class ProcessService:
@@ -171,28 +171,6 @@ def control_service(cls, service_name: str, action: str) -> Dict:
@classmethod
def get_service_logs(cls, service_name: str, lines: int = 100) -> Dict:
- """Get recent logs for a service."""
- system = platform.system()
-
- try:
- if system == 'Linux':
- if not is_command_available('journalctl'):
- return {'success': False, 'error': 'journalctl is not available on this system'}
-
- result = run_privileged(
- ['journalctl', '-u', service_name, '-n', str(lines), '--no-pager'],
- timeout=30
- )
-
- if result.returncode == 0:
- return {'success': True, 'logs': result.stdout}
- else:
- return {'success': False, 'error': result.stderr}
-
- else:
- return {'success': False, 'error': 'Log retrieval not supported on this platform'}
-
- except FileNotFoundError:
- return {'success': False, 'error': 'journalctl command not found'}
- except Exception as e:
- return {'success': False, 'error': str(e)}
+ """Get recent logs for a service via LogService fallback chain."""
+ from app.services.log_service import LogService
+ return LogService.get_journalctl_logs(unit=service_name, lines=lines)
diff --git a/backend/app/services/ssl_service.py b/backend/app/services/ssl_service.py
index 536b7b2..9ea73f6 100644
--- a/backend/app/services/ssl_service.py
+++ b/backend/app/services/ssl_service.py
@@ -5,7 +5,7 @@
from typing import Dict, List, Optional
from pathlib import Path
-from app.utils.system import ServiceControl, run_privileged
+from app.utils.system import ServiceControl, run_privileged, PackageManager, is_command_available
class SSLService:
@@ -18,26 +18,17 @@ class SSLService:
@classmethod
def is_certbot_installed(cls) -> bool:
"""Check if certbot is installed."""
- try:
- result = subprocess.run(
- ['which', 'certbot'],
- capture_output=True,
- text=True
- )
- return result.returncode == 0
- except Exception:
- return False
+ return is_command_available('certbot')
@classmethod
def install_certbot(cls) -> Dict:
"""Install certbot if not present."""
- try:
- result = run_privileged(['apt-get', 'update'], timeout=300)
- if result.returncode != 0:
- return {'success': False, 'error': result.stderr}
+ if not PackageManager.is_available():
+ return {'success': False, 'error': 'No supported package manager found'}
- result = run_privileged(
- ['apt-get', 'install', '-y', 'certbot', 'python3-certbot-nginx'],
+ try:
+ result = PackageManager.install(
+ ['certbot', 'python3-certbot-nginx'],
timeout=300,
)
if result.returncode != 0:
@@ -180,7 +171,7 @@ def list_certificates(cls) -> List[Dict]:
expiry_part = expiry_str.split(' (')[0]
current_cert['expiry'] = expiry_part
current_cert['expiry_valid'] = 'VALID' in expiry_str
- except:
+ except Exception:
current_cert['expiry'] = expiry_str
elif line.startswith('Certificate Path:'):
current_cert['cert_path'] = line.split(':', 1)[1].strip()
@@ -190,7 +181,7 @@ def list_certificates(cls) -> List[Dict]:
if current_cert:
certificates.append(current_cert)
- except Exception as e:
+ except Exception:
pass
return certificates
@@ -226,7 +217,7 @@ def get_certificate_info(cls, domain: str) -> Optional[Dict]:
return info
- except Exception as e:
+ except Exception:
return None
@classmethod
diff --git a/backend/app/services/system_service.py b/backend/app/services/system_service.py
index e52f39e..194ba08 100644
--- a/backend/app/services/system_service.py
+++ b/backend/app/services/system_service.py
@@ -4,6 +4,8 @@
import os
from datetime import datetime
+from app.utils.system import run_privileged
+
class SystemService:
"""Service for collecting system metrics and information."""
@@ -375,23 +377,23 @@ def set_timezone(cls, timezone_id):
try:
# Try timedatectl first (systemd)
- result = subprocess.run(
- ['sudo', 'timedatectl', 'set-timezone', timezone_id],
- capture_output=True, text=True, timeout=10
+ result = run_privileged(
+ ['timedatectl', 'set-timezone', timezone_id],
+ timeout=10
)
if result.returncode == 0:
return {'success': True, 'message': f'Timezone set to {timezone_id}'}
# Fallback: symlink method
- result = subprocess.run(
- ['sudo', 'ln', '-sf', f'/usr/share/zoneinfo/{timezone_id}', '/etc/localtime'],
- capture_output=True, text=True, timeout=10
+ result = run_privileged(
+ ['ln', '-sf', f'/usr/share/zoneinfo/{timezone_id}', '/etc/localtime'],
+ timeout=10
)
if result.returncode == 0:
# Also update /etc/timezone
- subprocess.run(
- ['sudo', 'bash', '-c', f'echo "{timezone_id}" > /etc/timezone'],
- capture_output=True, text=True, timeout=10
+ run_privileged(
+ ['bash', '-c', f'echo "{timezone_id}" > /etc/timezone'],
+ timeout=10
)
return {'success': True, 'message': f'Timezone set to {timezone_id}'}
diff --git a/backend/app/services/wordpress_service.py b/backend/app/services/wordpress_service.py
index 06ef72b..22204eb 100644
--- a/backend/app/services/wordpress_service.py
+++ b/backend/app/services/wordpress_service.py
@@ -9,6 +9,7 @@
from pathlib import Path
from app import paths
+from app.utils.system import run_privileged, privileged_cmd
class WordPressService:
@@ -41,7 +42,6 @@ def install_wp_cli(cls) -> Dict:
commands = [
['curl', '-O', 'https://raw.githubusercontent.com/wp-cli/builds/gh-pages/phar/wp-cli.phar'],
['chmod', '+x', 'wp-cli.phar'],
- ['sudo', 'mv', 'wp-cli.phar', cls.WP_CLI_PATH]
]
for cmd in commands:
@@ -49,6 +49,10 @@ def install_wp_cli(cls) -> Dict:
if result.returncode != 0:
return {'success': False, 'error': result.stderr}
+ result = run_privileged(['mv', 'wp-cli.phar', cls.WP_CLI_PATH], timeout=120)
+ if result.returncode != 0:
+ return {'success': False, 'error': result.stderr}
+
return {'success': True, 'message': 'WP-CLI installed successfully'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -67,7 +71,7 @@ def wp_cli(cls, path: str, command: List[str], user: str = 'www-data') -> Dict:
return install_result
try:
- cmd = ['sudo', '-u', user, cls.WP_CLI_PATH, '--path=' + path] + command
+ cmd = privileged_cmd(['sudo', '-u', user, cls.WP_CLI_PATH, '--path=' + path] + command)
result = subprocess.run(
cmd,
capture_output=True,
@@ -157,8 +161,8 @@ def install_wordpress(cls, path: str, config: Dict) -> Dict:
try:
# Create directory
- subprocess.run(['sudo', 'mkdir', '-p', path], capture_output=True)
- subprocess.run(['sudo', 'chown', 'www-data:www-data', path], capture_output=True)
+ run_privileged(['mkdir', '-p', path])
+ run_privileged(['chown', 'www-data:www-data', path])
# Download WordPress
download_result = cls.wp_cli(path, ['core', 'download', '--locale=en_US'])
@@ -228,7 +232,7 @@ def get_wordpress_info(cls, path: str) -> Optional[Dict]:
updates = json.loads(update_result['output'])
info['update_available'] = len(updates) > 0
info['latest_version'] = updates[0]['version'] if updates else info.get('version')
- except:
+ except Exception:
info['update_available'] = False
# Get site URL
@@ -265,7 +269,7 @@ def get_plugins(cls, path: str) -> List[Dict]:
if result['success']:
try:
return json.loads(result['output'])
- except:
+ except Exception:
return []
return []
@@ -325,7 +329,7 @@ def get_themes(cls, path: str) -> List[Dict]:
if result['success']:
try:
return json.loads(result['output'])
- except:
+ except Exception:
return []
return []
@@ -357,13 +361,12 @@ def backup_wordpress(cls, path: str, include_db: bool = True) -> Dict:
try:
# Create backup directory
- subprocess.run(['sudo', 'mkdir', '-p', backup_path], capture_output=True)
+ run_privileged(['mkdir', '-p', backup_path])
# Backup files
files_backup = os.path.join(backup_path, 'files.tar.gz')
- subprocess.run(
- ['sudo', 'tar', '-czf', files_backup, '-C', os.path.dirname(path), os.path.basename(path)],
- capture_output=True,
+ run_privileged(
+ ['tar', '-czf', files_backup, '-C', os.path.dirname(path), os.path.basename(path)],
timeout=600
)
@@ -379,7 +382,7 @@ def backup_wordpress(cls, path: str, include_db: bool = True) -> Dict:
size = sum(os.path.getsize(os.path.join(backup_path, f))
for f in os.listdir(backup_path)
if os.path.isfile(os.path.join(backup_path, f)))
- except:
+ except Exception:
size = 0
return {
@@ -433,7 +436,7 @@ def list_backups(cls, site_name: str = None) -> List[Dict]:
'size': size,
'timestamp': timestamp
})
- except:
+ except Exception:
pass
return sorted(backups, key=lambda x: x['timestamp'], reverse=True)
@@ -454,12 +457,11 @@ def restore_backup(cls, backup_name: str, target_path: str) -> Dict:
if os.path.exists(files_backup):
# Remove existing files
if os.path.exists(target_path):
- subprocess.run(['sudo', 'rm', '-rf', target_path], capture_output=True)
+ run_privileged(['rm', '-rf', target_path])
# Extract backup
- subprocess.run(
- ['sudo', 'tar', '-xzf', files_backup, '-C', os.path.dirname(target_path)],
- capture_output=True,
+ run_privileged(
+ ['tar', '-xzf', files_backup, '-C', os.path.dirname(target_path)],
timeout=600
)
@@ -486,7 +488,7 @@ def delete_backup(cls, backup_name: str) -> Dict:
return {'success': False, 'error': 'Backup not found'}
try:
- subprocess.run(['sudo', 'rm', '-rf', backup_path], capture_output=True)
+ run_privileged(['rm', '-rf', backup_path])
return {'success': True, 'message': 'Backup deleted'}
except Exception as e:
return {'success': False, 'error': str(e)}
@@ -531,26 +533,24 @@ def _set_permissions(cls, path: str):
"""Set secure file permissions for WordPress."""
try:
# Set ownership
- subprocess.run(['sudo', 'chown', '-R', 'www-data:www-data', path], capture_output=True)
+ run_privileged(['chown', '-R', 'www-data:www-data', path])
# Set directory permissions
- subprocess.run(
- ['sudo', 'find', path, '-type', 'd', '-exec', 'chmod', '755', '{}', ';'],
- capture_output=True
+ run_privileged(
+ ['find', path, '-type', 'd', '-exec', 'chmod', '755', '{}', ';']
)
# Set file permissions
- subprocess.run(
- ['sudo', 'find', path, '-type', 'f', '-exec', 'chmod', '644', '{}', ';'],
- capture_output=True
+ run_privileged(
+ ['find', path, '-type', 'f', '-exec', 'chmod', '644', '{}', ';']
)
# Protect wp-config.php
wp_config = os.path.join(path, 'wp-config.php')
if os.path.exists(wp_config):
- subprocess.run(['sudo', 'chmod', '600', wp_config], capture_output=True)
+ run_privileged(['chmod', '600', wp_config])
- except:
+ except Exception:
pass
@classmethod
@@ -598,13 +598,11 @@ def _create_htaccess_security(cls, path: str):
# Only add if not already present
if '# ServerKit Security Rules' not in existing:
new_content = security_rules + '\n' + existing
- subprocess.run(
- ['sudo', 'tee', htaccess_path],
- input=new_content,
- capture_output=True,
- text=True
+ run_privileged(
+ ['tee', htaccess_path],
+ input=new_content
)
- except:
+ except Exception:
pass
@classmethod
diff --git a/backend/app/utils/system.py b/backend/app/utils/system.py
index 1c58ea8..1c46620 100644
--- a/backend/app/utils/system.py
+++ b/backend/app/utils/system.py
@@ -75,6 +75,21 @@ def is_command_available(cmd: str) -> bool:
return False
+def sourced_result(lines: list, source: str, source_label: str) -> dict:
+ """Standard response shape for multi-source data endpoints.
+
+ Every fallback-chain endpoint should return this shape so the frontend
+ can show a consistent source-aware banner.
+ """
+ return {
+ 'success': True,
+ 'lines': lines,
+ 'count': len(lines),
+ 'source': source,
+ 'source_label': source_label,
+ }
+
+
class PackageManager:
"""Cross-distro package management helpers.
diff --git a/frontend/src/pages/Terminal.jsx b/frontend/src/pages/Terminal.jsx
index 3e72290..29fc3b8 100644
--- a/frontend/src/pages/Terminal.jsx
+++ b/frontend/src/pages/Terminal.jsx
@@ -321,20 +321,26 @@ const JournalTab = () => {
const [unit, setUnit] = useState('');
const [lineCount, setLineCount] = useState(100);
const [priority, setPriority] = useState('');
+ const [source, setSource] = useState('');
+ const [sourceLabel, setSourceLabel] = useState('');
const [commonUnits] = useState([
'nginx', 'apache2', 'mysql', 'mariadb', 'postgresql',
'php-fpm', 'docker', 'sshd', 'cron', 'systemd'
]);
+ const isJournalctl = source === 'journalctl' || source === '';
+
async function loadJournalLogs() {
setLoading(true);
setUnavailable(false);
try {
const data = await api.getJournalLogs(unit || null, lineCount);
- setLogs(data.content || data.logs || 'No logs available');
+ setLogs(data.lines?.join('\n') || 'No logs available');
+ setSource(data.source || '');
+ setSourceLabel(data.source_label || '');
} catch (err) {
const msg = err.message || '';
- if (msg.includes('not available') || msg.includes('not found')) {
+ if (msg.includes('No system log source available') || msg.includes('unavailable')) {
setUnavailable(true);
} else {
setLogs(`Error: ${msg}`);
@@ -358,11 +364,11 @@ const JournalTab = () => {
- System Journal Unavailable
+ System Logs Unavailable
- journalctl is not available on this system.
- This typically means the server is running without systemd
- (e.g. a minimal Docker container or Windows dev environment).
+ No system log source was found on this server.
+ Neither journalctl, /var/log/syslog,
+ nor the Windows Event Log are available.
Use the Log Files tab to browse available log files instead.
@@ -376,7 +382,7 @@ const JournalTab = () => {
-
Service/Unit
+
{isJournalctl ? 'Service/Unit' : 'Filter by service'}
{
onChange={(e) => setUnit(e.target.value)}
placeholder="All services"
/>
-
- {commonUnits.map(u => (
- setUnit(unit === u ? '' : u)}
- >
- {u}
-
- ))}
-
+ {isJournalctl && (
+
+ {commonUnits.map(u => (
+ setUnit(unit === u ? '' : u)}
+ >
+ {u}
+
+ ))}
+
+ )}
@@ -408,26 +416,41 @@ const JournalTab = () => {
-
- Priority
- setPriority(e.target.value)}>
- All
- Emergency
- Alert
- Critical
- Error
- Warning
- Notice
- Info
- Debug
-
-
+ {isJournalctl && (
+
+ Priority
+ setPriority(e.target.value)}>
+ All
+ Emergency
+ Alert
+ Critical
+ Error
+ Warning
+ Notice
+ Info
+ Debug
+
+
+ )}
{loading ? 'Loading...' : 'Load Logs'}
+ {!isJournalctl && source && (
+
+
+
+
+
+
+
+ Reading from {sourceLabel} — journalctl is not available on this system
+
+
+ )}
+
{loading ? 'Loading journal logs...' : logs}
@@ -711,7 +734,7 @@ const ServicesTab = () => {
setShowLogsModal(true);
try {
const data = await api.getJournalLogs(serviceName, 100);
- setServiceLogs(data.content || data.logs || 'No logs available');
+ setServiceLogs(data.lines?.join('\n') || 'No logs available');
} catch (err) {
setServiceLogs(`Error loading logs: ${err.message}`);
}
diff --git a/frontend/src/styles/pages/_terminal.less b/frontend/src/styles/pages/_terminal.less
index ebe91d0..8161112 100644
--- a/frontend/src/styles/pages/_terminal.less
+++ b/frontend/src/styles/pages/_terminal.less
@@ -358,6 +358,22 @@
}
}
+.journal-source-notice {
+ display: flex;
+ align-items: center;
+ gap: @space-2;
+ padding: @space-2 @space-4;
+ background: var(--bg-secondary);
+ border: 1px solid var(--border-color);
+ border-radius: @radius-md;
+ font-size: @font-size-xs;
+ color: var(--text-secondary);
+ margin-top: -@space-4;
+
+ svg { stroke: currentColor; fill: none; stroke-width: 2; flex-shrink: 0; }
+ strong { color: var(--text-primary); }
+}
+
.journal-viewer {
flex: 1;
background: var(--bg-code);
From 9b2f20b07eb751dbe189d6a74a9c823deaea30e3 Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Wed, 4 Mar 2026 00:31:18 -0500
Subject: [PATCH 08/18] Add email server backend, model and UI
Introduce a full email management feature: adds EmailService (Postfix/Dovecot/SpamAssassin/OpenDKIM management, account creation, forwarding, webmail, queue/logs, service control), new email API blueprint with admin-protected endpoints, and EmailAccount DB model. Register the email blueprint in app init. Frontend: add Email page, route and sidebar link, preload accent color + theme to prevent FOUC, new email styles and a dashboard layout hook. Also update ROADMAP.md to include Phase 21 SSO/OAuth and bump roadmap last-updated date.
---
ROADMAP.md | 21 +-
backend/app/__init__.py | 4 +
backend/app/api/email.py | 350 +++++
backend/app/models/__init__.py | 3 +-
backend/app/models/email_account.py | 36 +
backend/app/services/email_service.py | 932 +++++++++++++
frontend/index.html | 18 +-
frontend/src/App.jsx | 4 +
frontend/src/components/Sidebar.jsx | 7 +
frontend/src/contexts/ThemeContext.jsx | 54 +-
frontend/src/hooks/useDashboardLayout.js | 85 ++
frontend/src/pages/Dashboard.jsx | 291 +++--
frontend/src/pages/Email.jsx | 1164 +++++++++++++++++
frontend/src/pages/Settings.jsx | 88 +-
frontend/src/services/api.js | 125 ++
frontend/src/styles/_theme-variables.less | 6 +
frontend/src/styles/_variables.less | 15 +-
frontend/src/styles/components/_badges.less | 4 +-
frontend/src/styles/components/_build.less | 2 +-
frontend/src/styles/components/_cards.less | 4 +-
frontend/src/styles/components/_env-vars.less | 2 +-
.../src/styles/components/_linked-apps.less | 4 +-
frontend/src/styles/components/_modals.less | 2 +-
.../src/styles/components/_notifications.less | 4 +-
.../src/styles/components/_query-runner.less | 4 +-
frontend/src/styles/components/_spinner.less | 2 +-
frontend/src/styles/components/_toasts.less | 4 +-
.../src/styles/components/_two-factor.less | 4 +-
frontend/src/styles/components/_uptime.less | 2 +-
frontend/src/styles/components/_users.less | 2 +-
frontend/src/styles/main.less | 1 +
frontend/src/styles/pages/_applications.less | 6 +-
frontend/src/styles/pages/_backups.less | 2 +-
frontend/src/styles/pages/_cron.less | 2 +-
frontend/src/styles/pages/_email.less | 265 ++++
frontend/src/styles/pages/_file-manager.less | 2 +-
frontend/src/styles/pages/_git.less | 2 +-
frontend/src/styles/pages/_monitoring.less | 2 +-
frontend/src/styles/pages/_security.less | 8 +-
frontend/src/styles/pages/_servers.less | 2 +-
frontend/src/styles/pages/_settings.less | 167 ++-
frontend/src/styles/pages/_setup-wizard.less | 10 +-
.../src/styles/pages/_wordpress-pipeline.less | 24 +-
frontend/src/styles/pages/_wordpress.less | 2 +-
frontend/src/styles/pages/_workflow.less | 18 +-
45 files changed, 3539 insertions(+), 217 deletions(-)
create mode 100644 backend/app/api/email.py
create mode 100644 backend/app/models/email_account.py
create mode 100644 backend/app/services/email_service.py
create mode 100644 frontend/src/hooks/useDashboardLayout.js
create mode 100644 frontend/src/pages/Email.jsx
create mode 100644 frontend/src/styles/pages/_email.less
diff --git a/ROADMAP.md b/ROADMAP.md
index f91f8e4..d71e485 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -267,6 +267,23 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
+## Phase 21: SSO & OAuth Login (Planned)
+
+**Priority: High**
+
+- [ ] Google OAuth 2.0 login
+- [ ] GitHub OAuth login
+- [ ] Generic OpenID Connect (OIDC) provider support
+- [ ] SAML 2.0 support for enterprise environments
+- [ ] Social login UI (provider buttons on login page)
+- [ ] Account linking (connect OAuth identity to existing local account)
+- [ ] Auto-provisioning of new users on first SSO login
+- [ ] Configurable SSO settings (enable/disable providers, client ID/secret management)
+- [ ] Enforce SSO-only login (disable password auth for team members)
+- [ ] SSO session management and token refresh
+
+---
+
## Version Milestones
| Version | Target Features | Status |
@@ -276,7 +293,7 @@ This document outlines the development roadmap for ServerKit. Features are organ
| v1.1.0 | Multi-server, Git deployment | Planned |
| v1.2.0 | Backups, Advanced SSL, Advanced Security | Planned |
| v1.3.0 | Email server, API enhancements | Planned |
-| v1.4.0 | Team & permissions | Planned |
+| v1.4.0 | Team & permissions, SSO & OAuth login | Planned |
| v1.5.0 | Performance optimizations | Planned |
| v2.0.0 | Mobile app, Marketplace | Future |
@@ -304,5 +321,5 @@ Have a feature idea? Open an issue on GitHub with the `enhancement` label.
ServerKit Roadmap
- Last updated: January 2026
+ Last updated: March 2026
diff --git a/backend/app/__init__.py b/backend/app/__init__.py
index 59f03ff..444c311 100644
--- a/backend/app/__init__.py
+++ b/backend/app/__init__.py
@@ -151,6 +151,10 @@ def create_app(config_name=None):
from app.api.cron import cron_bp
app.register_blueprint(cron_bp, url_prefix='/api/v1/cron')
+ # Register blueprints - Email Server
+ from app.api.email import email_bp
+ app.register_blueprint(email_bp, url_prefix='/api/v1/email')
+
# Register blueprints - Uptime Tracking
from app.api.uptime import uptime_bp
app.register_blueprint(uptime_bp, url_prefix='/api/v1/uptime')
diff --git a/backend/app/api/email.py b/backend/app/api/email.py
new file mode 100644
index 0000000..b19555f
--- /dev/null
+++ b/backend/app/api/email.py
@@ -0,0 +1,350 @@
+from flask import Blueprint, request, jsonify
+from flask_jwt_extended import jwt_required, get_jwt_identity
+from app.models import User
+from app.services.email_service import EmailService
+
+email_bp = Blueprint('email', __name__)
+
+
+def admin_required(fn):
+ """Decorator to require admin role."""
+ from functools import wraps
+
+ @wraps(fn)
+ def wrapper(*args, **kwargs):
+ current_user_id = get_jwt_identity()
+ user = User.query.get(current_user_id)
+ if not user or user.role != 'admin':
+ return jsonify({'error': 'Admin access required'}), 403
+ return fn(*args, **kwargs)
+ return wrapper
+
+
+# ==========================================
+# STATUS & CONFIG
+# ==========================================
+
+@email_bp.route('/status', methods=['GET'])
+@jwt_required()
+def get_email_status():
+ """Get overall email server status."""
+ status = EmailService.get_status()
+ return jsonify(status), 200
+
+
+@email_bp.route('/config', methods=['GET'])
+@jwt_required()
+@admin_required
+def get_config():
+ """Get email configuration."""
+ config = EmailService.get_config()
+ return jsonify(config), 200
+
+
+@email_bp.route('/config', methods=['PUT'])
+@jwt_required()
+@admin_required
+def update_config():
+ """Update email configuration."""
+ data = request.get_json()
+ if not data:
+ return jsonify({'error': 'No data provided'}), 400
+
+ result = EmailService.save_config(data)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+# ==========================================
+# POSTFIX
+# ==========================================
+
+@email_bp.route('/postfix/install', methods=['POST'])
+@jwt_required()
+@admin_required
+def install_postfix():
+ """Install Postfix."""
+ result = EmailService.install_postfix()
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/postfix/config', methods=['GET'])
+@jwt_required()
+@admin_required
+def get_postfix_config():
+ """Get Postfix configuration."""
+ result = EmailService.get_postfix_config()
+ return jsonify(result), 200 if result.get('success') else 400
+
+
+@email_bp.route('/postfix/config', methods=['PUT'])
+@jwt_required()
+@admin_required
+def update_postfix_config():
+ """Update Postfix configuration."""
+ data = request.get_json()
+ if not data:
+ return jsonify({'error': 'No data provided'}), 400
+ result = EmailService.update_postfix_config(data)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/queue', methods=['GET'])
+@jwt_required()
+@admin_required
+def get_mail_queue():
+ """Get mail queue."""
+ result = EmailService.get_mail_queue()
+ return jsonify(result), 200 if result.get('success') else 400
+
+
+@email_bp.route('/queue/flush', methods=['POST'])
+@jwt_required()
+@admin_required
+def flush_mail_queue():
+ """Flush mail queue."""
+ result = EmailService.flush_mail_queue()
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/queue/', methods=['DELETE'])
+@jwt_required()
+@admin_required
+def delete_queued_message(queue_id):
+ """Delete a message from the queue."""
+ result = EmailService.delete_queued_message(queue_id)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+# ==========================================
+# DOVECOT
+# ==========================================
+
+@email_bp.route('/dovecot/install', methods=['POST'])
+@jwt_required()
+@admin_required
+def install_dovecot():
+ """Install Dovecot."""
+ result = EmailService.install_dovecot()
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/dovecot/config', methods=['GET'])
+@jwt_required()
+@admin_required
+def get_dovecot_config():
+ """Get Dovecot configuration."""
+ result = EmailService.get_dovecot_config()
+ return jsonify(result), 200 if result.get('success') else 400
+
+
+# ==========================================
+# EMAIL ACCOUNTS
+# ==========================================
+
+@email_bp.route('/accounts', methods=['GET'])
+@jwt_required()
+@admin_required
+def list_accounts():
+ """List all email accounts."""
+ accounts = EmailService.list_accounts()
+ return jsonify({'accounts': accounts}), 200
+
+
+@email_bp.route('/accounts', methods=['POST'])
+@jwt_required()
+@admin_required
+def create_account():
+ """Create a new email account."""
+ data = request.get_json()
+ if not data:
+ return jsonify({'error': 'No data provided'}), 400
+
+ required = ['email', 'password', 'domain']
+ for field in required:
+ if field not in data:
+ return jsonify({'error': f'{field} is required'}), 400
+
+ result = EmailService.create_account(
+ email=data['email'],
+ password=data['password'],
+ domain=data['domain'],
+ quota_mb=data.get('quota_mb', 1024)
+ )
+ return jsonify(result), 201 if result['success'] else 400
+
+
+@email_bp.route('/accounts/', methods=['PUT'])
+@jwt_required()
+@admin_required
+def update_account(account_id):
+ """Update an email account."""
+ data = request.get_json()
+ if not data:
+ return jsonify({'error': 'No data provided'}), 400
+
+ result = EmailService.update_account(account_id, **data)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/accounts/', methods=['DELETE'])
+@jwt_required()
+@admin_required
+def delete_account(account_id):
+ """Delete an email account."""
+ result = EmailService.delete_account(account_id)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/accounts//forwarding', methods=['PUT'])
+@jwt_required()
+@admin_required
+def set_forwarding(account_id):
+ """Set forwarding for an email account."""
+ data = request.get_json()
+ if not data or 'forward_to' not in data:
+ return jsonify({'error': 'forward_to is required'}), 400
+
+ result = EmailService.set_forwarding(
+ account_id,
+ forward_to=data['forward_to'],
+ keep_copy=data.get('keep_copy', True)
+ )
+ return jsonify(result), 200 if result['success'] else 400
+
+
+# ==========================================
+# SPAMASSASSIN
+# ==========================================
+
+@email_bp.route('/spamassassin/install', methods=['POST'])
+@jwt_required()
+@admin_required
+def install_spamassassin():
+ """Install SpamAssassin."""
+ result = EmailService.install_spamassassin()
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/spamassassin/config', methods=['GET'])
+@jwt_required()
+@admin_required
+def get_spamassassin_config():
+ """Get SpamAssassin configuration."""
+ result = EmailService.get_spamassassin_config()
+ return jsonify(result), 200 if result.get('success') else 400
+
+
+@email_bp.route('/spamassassin/config', methods=['PUT'])
+@jwt_required()
+@admin_required
+def update_spamassassin_config():
+ """Update SpamAssassin configuration."""
+ data = request.get_json()
+ if not data:
+ return jsonify({'error': 'No data provided'}), 400
+ result = EmailService.update_spamassassin_config(data)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+# ==========================================
+# DKIM / SPF / DMARC
+# ==========================================
+
+@email_bp.route('/dkim/install', methods=['POST'])
+@jwt_required()
+@admin_required
+def install_dkim():
+ """Install OpenDKIM."""
+ result = EmailService.install_dkim()
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/dkim/generate', methods=['POST'])
+@jwt_required()
+@admin_required
+def generate_dkim_key():
+ """Generate DKIM key for a domain."""
+ data = request.get_json()
+ if not data or 'domain' not in data:
+ return jsonify({'error': 'domain is required'}), 400
+
+ result = EmailService.generate_dkim_key(
+ domain=data['domain'],
+ selector=data.get('selector', 'mail')
+ )
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/dns/', methods=['GET'])
+@jwt_required()
+@admin_required
+def get_dns_records(domain):
+ """Get recommended DNS records for a domain."""
+ result = EmailService.get_dns_records(domain)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+# ==========================================
+# SERVICE CONTROL
+# ==========================================
+
+@email_bp.route('/services//start', methods=['POST'])
+@jwt_required()
+@admin_required
+def start_service(service):
+ """Start an email service."""
+ result = EmailService.start_service(service)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/services//stop', methods=['POST'])
+@jwt_required()
+@admin_required
+def stop_service(service):
+ """Stop an email service."""
+ result = EmailService.stop_service(service)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+@email_bp.route('/services//restart', methods=['POST'])
+@jwt_required()
+@admin_required
+def restart_service(service):
+ """Restart an email service."""
+ result = EmailService.restart_service(service)
+ return jsonify(result), 200 if result['success'] else 400
+
+
+# ==========================================
+# WEBMAIL
+# ==========================================
+
+@email_bp.route('/webmail/status', methods=['GET'])
+@jwt_required()
+def get_webmail_status():
+ """Get webmail installation status."""
+ status = EmailService.get_webmail_status()
+ return jsonify(status), 200
+
+
+@email_bp.route('/webmail/install', methods=['POST'])
+@jwt_required()
+@admin_required
+def install_webmail():
+ """Install Roundcube webmail."""
+ result = EmailService.install_webmail()
+ return jsonify(result), 200 if result['success'] else 400
+
+
+# ==========================================
+# LOGS
+# ==========================================
+
+@email_bp.route('/logs', methods=['GET'])
+@jwt_required()
+@admin_required
+def get_mail_logs():
+ """Get mail logs."""
+ lines = request.args.get('lines', 100, type=int)
+ result = EmailService.get_mail_log(lines=lines)
+ return jsonify(result), 200
diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py
index c1340a4..c3985ad 100644
--- a/backend/app/models/__init__.py
+++ b/backend/app/models/__init__.py
@@ -15,6 +15,7 @@
from app.models.environment_activity import EnvironmentActivity
from app.models.promotion_job import PromotionJob
from app.models.sanitization_profile import SanitizationProfile
+from app.models.email_account import EmailAccount
__all__ = [
'User', 'Application', 'Domain', 'EnvironmentVariable', 'EnvironmentVariableHistory',
@@ -22,5 +23,5 @@
'MetricsHistory', 'Workflow', 'GitWebhook', 'WebhookLog', 'GitDeployment',
'Server', 'ServerGroup', 'ServerMetrics', 'ServerCommand', 'AgentSession', 'SecurityAlert',
'WordPressSite', 'DatabaseSnapshot', 'SyncJob',
- 'EnvironmentActivity', 'PromotionJob', 'SanitizationProfile'
+ 'EnvironmentActivity', 'PromotionJob', 'SanitizationProfile', 'EmailAccount'
]
diff --git a/backend/app/models/email_account.py b/backend/app/models/email_account.py
new file mode 100644
index 0000000..417c028
--- /dev/null
+++ b/backend/app/models/email_account.py
@@ -0,0 +1,36 @@
+from datetime import datetime
+from app import db
+
+
+class EmailAccount(db.Model):
+ __tablename__ = 'email_accounts'
+
+ id = db.Column(db.Integer, primary_key=True)
+ email = db.Column(db.String(255), unique=True, nullable=False)
+ domain = db.Column(db.String(255), nullable=False)
+ username = db.Column(db.String(100), nullable=False)
+ quota_mb = db.Column(db.Integer, default=1024)
+ enabled = db.Column(db.Boolean, default=True)
+ created_at = db.Column(db.DateTime, default=datetime.utcnow)
+ updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+ # Forwarding
+ forward_to = db.Column(db.Text, nullable=True) # comma-separated addresses
+ forward_keep_copy = db.Column(db.Boolean, default=True)
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'email': self.email,
+ 'domain': self.domain,
+ 'username': self.username,
+ 'quota_mb': self.quota_mb,
+ 'enabled': self.enabled,
+ 'forward_to': self.forward_to,
+ 'forward_keep_copy': self.forward_keep_copy,
+ 'created_at': self.created_at.isoformat() if self.created_at else None,
+ 'updated_at': self.updated_at.isoformat() if self.updated_at else None,
+ }
+
+ def __repr__(self):
+ return f''
diff --git a/backend/app/services/email_service.py b/backend/app/services/email_service.py
new file mode 100644
index 0000000..bd6c597
--- /dev/null
+++ b/backend/app/services/email_service.py
@@ -0,0 +1,932 @@
+"""
+Email Service
+
+Manages mail server components:
+- Postfix (SMTP)
+- Dovecot (IMAP/POP3)
+- SpamAssassin (spam filtering)
+- DKIM/SPF/DMARC (email authentication)
+- Email accounts (virtual users)
+- Forwarding rules
+"""
+
+import os
+import json
+import subprocess
+import re
+from datetime import datetime
+from typing import Dict, List, Optional
+
+from app import paths
+from app.utils.system import (
+ PackageManager,
+ ServiceControl,
+ run_privileged,
+ is_command_available,
+)
+
+
+class EmailService:
+ """Service for email server management."""
+
+ CONFIG_DIR = paths.SERVERKIT_CONFIG_DIR
+ EMAIL_CONFIG = os.path.join(CONFIG_DIR, 'email.json')
+
+ # Postfix paths
+ POSTFIX_MAIN_CF = '/etc/postfix/main.cf'
+ POSTFIX_MASTER_CF = '/etc/postfix/master.cf'
+ VIRTUAL_MAILBOX_DOMAINS = '/etc/postfix/virtual_domains'
+ VIRTUAL_MAILBOX_MAPS = '/etc/postfix/virtual_mailbox_maps'
+ VIRTUAL_ALIAS_MAPS = '/etc/postfix/virtual_alias_maps'
+
+ # Dovecot paths
+ DOVECOT_CONF = '/etc/dovecot/dovecot.conf'
+ DOVECOT_CONF_D = '/etc/dovecot/conf.d'
+
+ # Mail storage
+ VMAIL_DIR = '/var/vmail'
+ VMAIL_UID = 5000
+ VMAIL_GID = 5000
+
+ # OpenDKIM
+ DKIM_KEY_DIR = '/etc/opendkim/keys'
+ DKIM_KEY_TABLE = '/etc/opendkim/key.table'
+ DKIM_SIGNING_TABLE = '/etc/opendkim/signing.table'
+ DKIM_TRUSTED_HOSTS = '/etc/opendkim/trusted.hosts'
+
+ # ==========================================
+ # STATUS & CONFIG
+ # ==========================================
+
+ @classmethod
+ def get_config(cls) -> Dict:
+ """Get email server configuration."""
+ if os.path.exists(cls.EMAIL_CONFIG):
+ try:
+ with open(cls.EMAIL_CONFIG, 'r') as f:
+ return json.load(f)
+ except Exception:
+ pass
+
+ return {
+ 'postfix': {'enabled': False},
+ 'dovecot': {'enabled': False},
+ 'spamassassin': {'enabled': False},
+ 'dkim': {'enabled': False},
+ }
+
+ @classmethod
+ def save_config(cls, config: Dict) -> Dict:
+ """Save email server configuration."""
+ try:
+ os.makedirs(cls.CONFIG_DIR, exist_ok=True)
+ with open(cls.EMAIL_CONFIG, 'w') as f:
+ json.dump(config, f, indent=2)
+ return {'success': True, 'message': 'Configuration saved'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def get_status(cls) -> Dict:
+ """Get overall email server status."""
+ if os.name == 'nt':
+ return {
+ 'postfix': {'installed': False, 'running': False},
+ 'dovecot': {'installed': False, 'running': False},
+ 'spamassassin': {'installed': False, 'running': False},
+ 'opendkim': {'installed': False, 'running': False},
+ 'available': False,
+ }
+
+ postfix = cls._get_service_status('postfix')
+ dovecot = cls._get_service_status('dovecot')
+ spamassassin = cls._get_service_status('spamassassin', cmd='spamd')
+ opendkim = cls._get_service_status('opendkim')
+
+ return {
+ 'postfix': postfix,
+ 'dovecot': dovecot,
+ 'spamassassin': spamassassin,
+ 'opendkim': opendkim,
+ 'available': postfix['installed'] or dovecot['installed'],
+ }
+
+ @classmethod
+ def _get_service_status(cls, service: str, cmd: str = None) -> Dict:
+ """Get installation and running status for a service."""
+ cmd = cmd or service
+ installed = is_command_available(cmd) or PackageManager.is_installed(service)
+ running = ServiceControl.is_active(service) if installed else False
+ enabled = ServiceControl.is_enabled(service) if installed else False
+
+ result = {
+ 'installed': installed,
+ 'running': running,
+ 'enabled': enabled,
+ }
+
+ # Get version if installed
+ if installed:
+ result['version'] = cls._get_version(service)
+
+ return result
+
+ @classmethod
+ def _get_version(cls, service: str) -> Optional[str]:
+ """Get version string for a service."""
+ try:
+ if service == 'postfix':
+ r = subprocess.run(['postconf', 'mail_version'], capture_output=True, text=True)
+ if r.returncode == 0:
+ return r.stdout.strip().split('=')[-1].strip()
+ elif service == 'dovecot':
+ r = subprocess.run(['dovecot', '--version'], capture_output=True, text=True)
+ if r.returncode == 0:
+ return r.stdout.strip().split()[0]
+ elif service == 'spamassassin':
+ r = subprocess.run(['spamd', '--version'], capture_output=True, text=True)
+ if r.returncode == 0:
+ match = re.search(r'[\d.]+', r.stdout)
+ return match.group() if match else r.stdout.strip()
+ elif service == 'opendkim':
+ r = subprocess.run(['opendkim', '-V'], capture_output=True, text=True)
+ output = r.stdout or r.stderr
+ if output:
+ match = re.search(r'[\d.]+', output)
+ return match.group() if match else None
+ except FileNotFoundError:
+ pass
+ return None
+
+ # ==========================================
+ # POSTFIX
+ # ==========================================
+
+ @classmethod
+ def install_postfix(cls) -> Dict:
+ """Install Postfix MTA."""
+ try:
+ if os.name == 'nt':
+ return {'success': False, 'error': 'Postfix requires Linux'}
+
+ # Pre-seed debconf to avoid interactive prompt
+ manager = PackageManager.detect()
+ if manager == 'apt':
+ run_privileged(
+ 'debconf-set-selections <<< "postfix postfix/main_mailer_type select Internet Site"',
+ shell=True
+ )
+ run_privileged(
+ 'debconf-set-selections <<< "postfix postfix/mailname string $(hostname -f)"',
+ shell=True
+ )
+
+ result = PackageManager.install(['postfix', 'postfix-mysql'] if manager == 'apt'
+ else ['postfix'])
+ if result.returncode != 0:
+ return {'success': False, 'error': result.stderr or 'Installation failed'}
+
+ ServiceControl.enable('postfix')
+ ServiceControl.start('postfix')
+
+ return {'success': True, 'message': 'Postfix installed successfully'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def get_postfix_config(cls) -> Dict:
+ """Read key Postfix configuration values."""
+ config = {}
+ try:
+ params = ['myhostname', 'mydomain', 'myorigin', 'inet_interfaces',
+ 'mydestination', 'relay_host', 'mynetworks',
+ 'smtpd_tls_cert_file', 'smtpd_tls_key_file',
+ 'message_size_limit', 'mailbox_size_limit']
+
+ for param in params:
+ r = subprocess.run(['postconf', param], capture_output=True, text=True)
+ if r.returncode == 0:
+ key, _, value = r.stdout.strip().partition(' = ')
+ config[key.strip()] = value.strip()
+
+ return {'success': True, 'config': config}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'postconf not found'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def update_postfix_config(cls, settings: Dict) -> Dict:
+ """Update Postfix configuration parameters."""
+ try:
+ for key, value in settings.items():
+ # Sanitize key — only allow word chars and underscores
+ if not re.match(r'^[a-z_]+$', key):
+ continue
+ run_privileged(['postconf', '-e', f'{key}={value}'])
+
+ ServiceControl.reload('postfix')
+ return {'success': True, 'message': 'Postfix configuration updated'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def get_mail_queue(cls) -> Dict:
+ """Get Postfix mail queue."""
+ try:
+ r = run_privileged(['mailq'])
+ if r.returncode != 0 and 'empty' not in (r.stdout or '').lower():
+ return {'success': False, 'error': r.stderr or 'Failed to get queue'}
+
+ lines = (r.stdout or '').strip().split('\n')
+ queue_items = []
+ current_item = {}
+
+ for line in lines:
+ if line.startswith('-') or 'Mail queue is empty' in line:
+ continue
+ # Queue ID line
+ match = re.match(r'^([A-F0-9]+)\s+(\d+)\s+(\w+\s+\w+\s+\d+\s+[\d:]+)\s+(.+)', line)
+ if match:
+ if current_item:
+ queue_items.append(current_item)
+ current_item = {
+ 'id': match.group(1),
+ 'size': int(match.group(2)),
+ 'date': match.group(3),
+ 'sender': match.group(4),
+ }
+ elif line.strip() and current_item:
+ # Recipient line
+ current_item.setdefault('recipients', []).append(line.strip())
+
+ if current_item:
+ queue_items.append(current_item)
+
+ return {'success': True, 'queue': queue_items, 'count': len(queue_items)}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def flush_mail_queue(cls) -> Dict:
+ """Flush the Postfix mail queue."""
+ try:
+ r = run_privileged(['postqueue', '-f'])
+ return {'success': True, 'message': 'Mail queue flushed'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def delete_queued_message(cls, queue_id: str) -> Dict:
+ """Delete a specific message from the queue."""
+ try:
+ if not re.match(r'^[A-F0-9]+$', queue_id):
+ return {'success': False, 'error': 'Invalid queue ID'}
+ r = run_privileged(['postsuper', '-d', queue_id])
+ if r.returncode != 0:
+ return {'success': False, 'error': r.stderr or 'Failed to delete message'}
+ return {'success': True, 'message': f'Message {queue_id} deleted'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ # ==========================================
+ # DOVECOT
+ # ==========================================
+
+ @classmethod
+ def install_dovecot(cls) -> Dict:
+ """Install Dovecot IMAP/POP3 server."""
+ try:
+ if os.name == 'nt':
+ return {'success': False, 'error': 'Dovecot requires Linux'}
+
+ manager = PackageManager.detect()
+ packages = ['dovecot-core', 'dovecot-imapd', 'dovecot-pop3d',
+ 'dovecot-lmtpd'] if manager == 'apt' else ['dovecot']
+
+ result = PackageManager.install(packages)
+ if result.returncode != 0:
+ return {'success': False, 'error': result.stderr or 'Installation failed'}
+
+ # Create vmail user/group
+ run_privileged(['groupadd', '-g', str(cls.VMAIL_GID), 'vmail'], timeout=10)
+ run_privileged(['useradd', '-u', str(cls.VMAIL_UID), '-g', 'vmail',
+ '-d', cls.VMAIL_DIR, '-s', '/usr/sbin/nologin', 'vmail'], timeout=10)
+ run_privileged(['mkdir', '-p', cls.VMAIL_DIR])
+ run_privileged(['chown', '-R', f'{cls.VMAIL_UID}:{cls.VMAIL_GID}', cls.VMAIL_DIR])
+
+ ServiceControl.enable('dovecot')
+ ServiceControl.start('dovecot')
+
+ return {'success': True, 'message': 'Dovecot installed successfully'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def get_dovecot_config(cls) -> Dict:
+ """Get Dovecot configuration summary."""
+ try:
+ r = subprocess.run(['doveconf', '-n'], capture_output=True, text=True)
+ if r.returncode != 0:
+ return {'success': False, 'error': 'Failed to read Dovecot config'}
+
+ config = {}
+ for line in r.stdout.split('\n'):
+ line = line.strip()
+ if '=' in line and not line.startswith('#'):
+ key, _, value = line.partition('=')
+ config[key.strip()] = value.strip()
+
+ return {'success': True, 'config': config}
+ except FileNotFoundError:
+ return {'success': False, 'error': 'doveconf not found'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ # ==========================================
+ # EMAIL ACCOUNTS
+ # ==========================================
+
+ @classmethod
+ def list_accounts(cls) -> List[Dict]:
+ """List all email accounts from the database."""
+ from app.models import EmailAccount
+ accounts = EmailAccount.query.all()
+ return [a.to_dict() for a in accounts]
+
+ @classmethod
+ def create_account(cls, email: str, password: str, domain: str,
+ quota_mb: int = 1024) -> Dict:
+ """Create a new email account."""
+ from app.models import EmailAccount
+ from app import db
+
+ try:
+ if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email):
+ return {'success': False, 'error': 'Invalid email address'}
+
+ existing = EmailAccount.query.filter_by(email=email).first()
+ if existing:
+ return {'success': False, 'error': 'Email account already exists'}
+
+ username = email.split('@')[0]
+
+ # Create system mailbox directory
+ if os.name != 'nt':
+ maildir = os.path.join(cls.VMAIL_DIR, domain, username)
+ run_privileged(['mkdir', '-p', maildir])
+ run_privileged(['chown', '-R', f'{cls.VMAIL_UID}:{cls.VMAIL_GID}', maildir])
+
+ # Generate password hash for Dovecot
+ r = subprocess.run(
+ ['doveadm', 'pw', '-s', 'SHA512-CRYPT', '-p', password],
+ capture_output=True, text=True
+ )
+ if r.returncode == 0:
+ pw_hash = r.stdout.strip()
+ else:
+ # Fallback if doveadm not available yet
+ pw_hash = password
+
+ # Update virtual mailbox maps
+ cls._update_virtual_maps(email, domain, username, pw_hash)
+
+ account = EmailAccount(
+ email=email,
+ domain=domain,
+ username=username,
+ quota_mb=quota_mb,
+ enabled=True,
+ )
+ db.session.add(account)
+ db.session.commit()
+
+ return {'success': True, 'data': account.to_dict(),
+ 'message': f'Account {email} created'}
+ except Exception as e:
+ db.session.rollback()
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def update_account(cls, account_id: int, **kwargs) -> Dict:
+ """Update an email account."""
+ from app.models import EmailAccount
+ from app import db
+
+ try:
+ account = EmailAccount.query.get(account_id)
+ if not account:
+ return {'success': False, 'error': 'Account not found'}
+
+ password = kwargs.pop('password', None)
+
+ for key, value in kwargs.items():
+ if hasattr(account, key):
+ setattr(account, key, value)
+
+ # Update password if provided
+ if password and os.name != 'nt':
+ r = subprocess.run(
+ ['doveadm', 'pw', '-s', 'SHA512-CRYPT', '-p', password],
+ capture_output=True, text=True
+ )
+ if r.returncode == 0:
+ cls._update_virtual_maps(
+ account.email, account.domain,
+ account.username, r.stdout.strip()
+ )
+
+ db.session.commit()
+ return {'success': True, 'data': account.to_dict()}
+ except Exception as e:
+ db.session.rollback()
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def delete_account(cls, account_id: int) -> Dict:
+ """Delete an email account."""
+ from app.models import EmailAccount
+ from app import db
+
+ try:
+ account = EmailAccount.query.get(account_id)
+ if not account:
+ return {'success': False, 'error': 'Account not found'}
+
+ email = account.email
+
+ # Remove mailbox directory
+ if os.name != 'nt':
+ maildir = os.path.join(cls.VMAIL_DIR, account.domain, account.username)
+ if os.path.exists(maildir):
+ run_privileged(['rm', '-rf', maildir])
+
+ db.session.delete(account)
+ db.session.commit()
+
+ # Rebuild virtual maps
+ cls._rebuild_virtual_maps()
+
+ return {'success': True, 'message': f'Account {email} deleted'}
+ except Exception as e:
+ db.session.rollback()
+ return {'success': False, 'error': str(e)}
+
+    @classmethod
+    def _update_virtual_maps(cls, email: str, domain: str,
+                             username: str, pw_hash: str) -> None:
+        """Update Postfix virtual mailbox maps and Dovecot passwd file.
+
+        Registers `domain` in virtual_domains, points `email` at its Maildir
+        in the virtual mailbox map, writes the Dovecot passwd-file entry
+        (email:hash:uid:gid::home::), then rebuilds the postmap database.
+
+        NOTE(review): `domain` is interpolated into a shell command below —
+        callers must validate it first; there is no guard here. `pw_hash`
+        flows through _add_or_update_line's `echo "..."` write, where `$`
+        sequences in crypt hashes may be expanded by the shell — verify
+        against run_privileged's implementation.
+        """
+        # Ensure domain is in virtual_domains
+        domains_file = cls.VIRTUAL_MAILBOX_DOMAINS
+        if os.path.exists(domains_file):
+            with open(domains_file, 'r') as f:
+                domains = f.read()
+        else:
+            domains = ''
+
+        # NOTE(review): substring check — "example.com" also matches inside
+        # "sub.example.com", so a genuinely new domain can be wrongly skipped
+        # here. A per-line comparison would be exact.
+        if domain not in domains:
+            run_privileged(f'echo "{domain}" >> {domains_file}', shell=True)
+
+        # Add to virtual mailbox maps
+        mailbox_entry = f'{email} {domain}/{username}/\n'
+        maps_file = cls.VIRTUAL_MAILBOX_MAPS
+        cls._add_or_update_line(maps_file, email, mailbox_entry)
+
+        # Update Dovecot virtual users passwd file
+        passwd_file = f'/etc/dovecot/users'
+        passwd_entry = (f'{email}:{pw_hash}:{cls.VMAIL_UID}:{cls.VMAIL_GID}::'
+                        f'{cls.VMAIL_DIR}/{domain}/{username}::\n')
+        cls._add_or_update_line(passwd_file, email, passwd_entry)
+
+        # Rebuild postfix maps
+        run_privileged(['postmap', maps_file])
+
+    @classmethod
+    def _add_or_update_line(cls, filepath: str, match_key: str, new_line: str) -> None:
+        """Add or update a line in a file matching the key.
+
+        Replaces any line starting with `match_key` with `new_line`, appending
+        it when no match exists; blank lines are dropped in the rewrite.
+
+        NOTE(review): the final write goes through `echo "{content}"` under a
+        shell — double quotes, backslashes or `$` inside the content (e.g. a
+        SHA512-CRYPT hash containing `$6$`) can be mangled or expanded by the
+        shell. Writing via stdin or a temp file would be safer — TODO confirm
+        what run_privileged supports.
+        """
+        lines = []
+        found = False
+
+        if os.path.exists(filepath):
+            r = run_privileged(['cat', filepath])
+            if r.returncode == 0:
+                lines = r.stdout.split('\n')
+
+        updated = []
+        for line in lines:
+            if line.startswith(match_key):
+                # Existing entry for this key: replace it in place.
+                updated.append(new_line.rstrip())
+                found = True
+            elif line.strip():
+                updated.append(line)
+
+        if not found:
+            updated.append(new_line.rstrip())
+
+        content = '\n'.join(updated) + '\n'
+        run_privileged(f'echo "{content}" > {filepath}', shell=True)
+
+ @classmethod
+ def _rebuild_virtual_maps(cls) -> None:
+ """Rebuild virtual maps from database."""
+ from app.models import EmailAccount
+
+ accounts = EmailAccount.query.filter_by(enabled=True).all()
+
+ # Rebuild domains file
+ domains = set(a.domain for a in accounts)
+ if domains:
+ content = '\n'.join(domains) + '\n'
+ run_privileged(f"printf '%s' '{content}' > {cls.VIRTUAL_MAILBOX_DOMAINS}", shell=True)
+
+ # Rebuild mailbox maps
+ maps_lines = []
+ for a in accounts:
+ maps_lines.append(f'{a.email} {a.domain}/{a.username}/')
+ if maps_lines:
+ content = '\n'.join(maps_lines) + '\n'
+ run_privileged(f"printf '%s' '{content}' > {cls.VIRTUAL_MAILBOX_MAPS}", shell=True)
+ run_privileged(['postmap', cls.VIRTUAL_MAILBOX_MAPS])
+
+ # ==========================================
+ # FORWARDING
+ # ==========================================
+
+ @classmethod
+ def set_forwarding(cls, account_id: int, forward_to: str,
+ keep_copy: bool = True) -> Dict:
+ """Set email forwarding for an account."""
+ from app.models import EmailAccount
+ from app import db
+
+ try:
+ account = EmailAccount.query.get(account_id)
+ if not account:
+ return {'success': False, 'error': 'Account not found'}
+
+ account.forward_to = forward_to
+ account.forward_keep_copy = keep_copy
+
+ # Update Postfix virtual alias maps
+ if os.name != 'nt' and forward_to:
+ aliases = forward_to
+ if keep_copy:
+ aliases = f'{account.email}, {forward_to}'
+ entry = f'{account.email} {aliases}\n'
+ cls._add_or_update_line(cls.VIRTUAL_ALIAS_MAPS, account.email, entry)
+ run_privileged(['postmap', cls.VIRTUAL_ALIAS_MAPS])
+
+ db.session.commit()
+ return {'success': True, 'data': account.to_dict()}
+ except Exception as e:
+ db.session.rollback()
+ return {'success': False, 'error': str(e)}
+
+ # ==========================================
+ # SPAMASSASSIN
+ # ==========================================
+
+ @classmethod
+ def install_spamassassin(cls) -> Dict:
+ """Install SpamAssassin."""
+ try:
+ if os.name == 'nt':
+ return {'success': False, 'error': 'SpamAssassin requires Linux'}
+
+ result = PackageManager.install(['spamassassin', 'spamc'])
+ if result.returncode != 0:
+ return {'success': False, 'error': result.stderr or 'Installation failed'}
+
+ ServiceControl.enable('spamassassin')
+ ServiceControl.start('spamassassin')
+
+ # Update SpamAssassin rules
+ run_privileged(['sa-update'], timeout=120)
+
+ return {'success': True, 'message': 'SpamAssassin installed successfully'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def get_spamassassin_config(cls) -> Dict:
+ """Get SpamAssassin configuration."""
+ config = {
+ 'required_score': 5.0,
+ 'rewrite_header_subject': '***SPAM***',
+ 'use_bayes': True,
+ 'bayes_auto_learn': True,
+ }
+
+ local_cf = '/etc/spamassassin/local.cf'
+ try:
+ if os.path.exists(local_cf):
+ r = run_privileged(['cat', local_cf])
+ if r.returncode == 0:
+ for line in r.stdout.split('\n'):
+ line = line.strip()
+ if line.startswith('#') or not line:
+ continue
+ parts = line.split(None, 1)
+ if len(parts) == 2:
+ key, value = parts
+ if key == 'required_score':
+ config['required_score'] = float(value)
+ elif key == 'rewrite_header':
+ config['rewrite_header_subject'] = value.replace('Subject ', '')
+ elif key == 'use_bayes':
+ config['use_bayes'] = value == '1'
+ elif key == 'bayes_auto_learn':
+ config['bayes_auto_learn'] = value == '1'
+ except Exception:
+ pass
+
+ return {'success': True, 'config': config}
+
+ @classmethod
+ def update_spamassassin_config(cls, settings: Dict) -> Dict:
+ """Update SpamAssassin configuration."""
+ try:
+ local_cf = '/etc/spamassassin/local.cf'
+ lines = []
+
+ if 'required_score' in settings:
+ lines.append(f"required_score {settings['required_score']}")
+ if 'rewrite_header_subject' in settings:
+ lines.append(f"rewrite_header Subject {settings['rewrite_header_subject']}")
+ if 'use_bayes' in settings:
+ lines.append(f"use_bayes {'1' if settings['use_bayes'] else '0'}")
+ if 'bayes_auto_learn' in settings:
+ lines.append(f"bayes_auto_learn {'1' if settings['bayes_auto_learn'] else '0'}")
+
+ content = '\n'.join(lines) + '\n'
+ run_privileged(f"printf '%s' '{content}' > {local_cf}", shell=True)
+
+ ServiceControl.restart('spamassassin')
+ return {'success': True, 'message': 'SpamAssassin configuration updated'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ # ==========================================
+ # DKIM / SPF / DMARC
+ # ==========================================
+
+ @classmethod
+ def install_dkim(cls) -> Dict:
+ """Install OpenDKIM."""
+ try:
+ if os.name == 'nt':
+ return {'success': False, 'error': 'OpenDKIM requires Linux'}
+
+ manager = PackageManager.detect()
+ packages = ['opendkim', 'opendkim-tools'] if manager == 'apt' else ['opendkim']
+
+ result = PackageManager.install(packages)
+ if result.returncode != 0:
+ return {'success': False, 'error': result.stderr or 'Installation failed'}
+
+ # Create directories
+ run_privileged(['mkdir', '-p', cls.DKIM_KEY_DIR])
+ run_privileged(['chown', '-R', 'opendkim:opendkim', '/etc/opendkim'])
+
+ # Create initial config files
+ for filepath in [cls.DKIM_KEY_TABLE, cls.DKIM_SIGNING_TABLE, cls.DKIM_TRUSTED_HOSTS]:
+ if not os.path.exists(filepath):
+ run_privileged(f'touch {filepath}', shell=True)
+ run_privileged(['chown', 'opendkim:opendkim', filepath])
+
+ # Set trusted hosts
+ run_privileged(
+ f"printf '127.0.0.1\\nlocalhost\\n' > {cls.DKIM_TRUSTED_HOSTS}",
+ shell=True
+ )
+
+ ServiceControl.enable('opendkim')
+ ServiceControl.start('opendkim')
+
+ return {'success': True, 'message': 'OpenDKIM installed successfully'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def generate_dkim_key(cls, domain: str, selector: str = 'mail') -> Dict:
+ """Generate DKIM keys for a domain."""
+ try:
+ if os.name == 'nt':
+ return {'success': False, 'error': 'OpenDKIM requires Linux'}
+
+ if not re.match(r'^[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', domain):
+ return {'success': False, 'error': 'Invalid domain'}
+ if not re.match(r'^[a-zA-Z0-9]+$', selector):
+ return {'success': False, 'error': 'Invalid selector'}
+
+ key_dir = os.path.join(cls.DKIM_KEY_DIR, domain)
+ run_privileged(['mkdir', '-p', key_dir])
+
+ # Generate key pair
+ run_privileged([
+ 'opendkim-genkey',
+ '-b', '2048',
+ '-d', domain,
+ '-D', key_dir,
+ '-s', selector,
+ '-v'
+ ])
+
+ run_privileged(['chown', '-R', 'opendkim:opendkim', key_dir])
+
+ # Read the public key for DNS record
+ txt_file = os.path.join(key_dir, f'{selector}.txt')
+ r = run_privileged(['cat', txt_file])
+ dns_record = r.stdout.strip() if r.returncode == 0 else ''
+
+ # Update key table
+ key_entry = f'{selector}._domainkey.{domain} {domain}:{selector}:{key_dir}/{selector}.private\n'
+ cls._add_or_update_line(cls.DKIM_KEY_TABLE, f'{selector}._domainkey.{domain}', key_entry)
+
+ # Update signing table
+ sign_entry = f'*@{domain} {selector}._domainkey.{domain}\n'
+ cls._add_or_update_line(cls.DKIM_SIGNING_TABLE, f'*@{domain}', sign_entry)
+
+ # Add domain to trusted hosts
+ run_privileged(f'echo "{domain}" >> {cls.DKIM_TRUSTED_HOSTS}', shell=True)
+
+ ServiceControl.restart('opendkim')
+
+ return {
+ 'success': True,
+ 'dns_record': dns_record,
+ 'selector': selector,
+ 'message': f'DKIM key generated for {domain}'
+ }
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def get_dns_records(cls, domain: str) -> Dict:
+ """Get recommended DNS records for email authentication (SPF, DKIM, DMARC)."""
+ records = []
+
+ # SPF record
+ records.append({
+ 'type': 'TXT',
+ 'name': domain,
+ 'value': 'v=spf1 mx a ~all',
+ 'purpose': 'SPF - Authorize this server to send email',
+ })
+
+ # DMARC record
+ records.append({
+ 'type': 'TXT',
+ 'name': f'_dmarc.{domain}',
+ 'value': f'v=DMARC1; p=quarantine; rua=mailto:dmarc@{domain}; pct=100',
+ 'purpose': 'DMARC - Email authentication policy',
+ })
+
+ # DKIM record (read from generated key)
+ if os.name != 'nt':
+ key_dir = os.path.join(cls.DKIM_KEY_DIR, domain)
+ txt_files = []
+ try:
+ r = run_privileged(['ls', key_dir])
+ if r.returncode == 0:
+ txt_files = [f for f in r.stdout.split() if f.endswith('.txt')]
+ except Exception:
+ pass
+
+ for txt_file in txt_files:
+ selector = txt_file.replace('.txt', '')
+ r = run_privileged(['cat', os.path.join(key_dir, txt_file)])
+ if r.returncode == 0:
+ records.append({
+ 'type': 'TXT',
+ 'name': f'{selector}._domainkey.{domain}',
+ 'value': r.stdout.strip(),
+ 'purpose': f'DKIM - Email signing ({selector} selector)',
+ })
+
+ # MX record
+ records.append({
+ 'type': 'MX',
+ 'name': domain,
+ 'value': f'10 mail.{domain}',
+ 'purpose': 'MX - Direct email to this server',
+ })
+
+ return {'success': True, 'records': records}
+
+ # ==========================================
+ # SERVICE CONTROL
+ # ==========================================
+
+ @classmethod
+ def start_service(cls, service: str) -> Dict:
+ """Start an email service."""
+ allowed = {'postfix', 'dovecot', 'spamassassin', 'opendkim'}
+ if service not in allowed:
+ return {'success': False, 'error': f'Unknown service: {service}'}
+
+ try:
+ r = ServiceControl.start(service)
+ if r.returncode != 0:
+ return {'success': False, 'error': r.stderr or f'Failed to start {service}'}
+ return {'success': True, 'message': f'{service} started'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def stop_service(cls, service: str) -> Dict:
+ """Stop an email service."""
+ allowed = {'postfix', 'dovecot', 'spamassassin', 'opendkim'}
+ if service not in allowed:
+ return {'success': False, 'error': f'Unknown service: {service}'}
+
+ try:
+ r = ServiceControl.stop(service)
+ if r.returncode != 0:
+ return {'success': False, 'error': r.stderr or f'Failed to stop {service}'}
+ return {'success': True, 'message': f'{service} stopped'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def restart_service(cls, service: str) -> Dict:
+ """Restart an email service."""
+ allowed = {'postfix', 'dovecot', 'spamassassin', 'opendkim'}
+ if service not in allowed:
+ return {'success': False, 'error': f'Unknown service: {service}'}
+
+ try:
+ r = ServiceControl.restart(service)
+ if r.returncode != 0:
+ return {'success': False, 'error': r.stderr or f'Failed to restart {service}'}
+ return {'success': True, 'message': f'{service} restarted'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ # ==========================================
+ # WEBMAIL
+ # ==========================================
+
+ @classmethod
+ def get_webmail_status(cls) -> Dict:
+ """Check if Roundcube webmail is installed."""
+ if os.name == 'nt':
+ return {'installed': False, 'url': None}
+
+ installed = (
+ os.path.exists('/var/www/roundcube') or
+ os.path.exists('/usr/share/roundcube') or
+ PackageManager.is_installed('roundcube')
+ )
+
+ return {
+ 'installed': installed,
+ 'url': '/webmail' if installed else None,
+ }
+
+ @classmethod
+ def install_webmail(cls) -> Dict:
+ """Install Roundcube webmail."""
+ try:
+ if os.name == 'nt':
+ return {'success': False, 'error': 'Roundcube requires Linux'}
+
+ result = PackageManager.install(['roundcube', 'roundcube-plugins'])
+ if result.returncode != 0:
+ return {'success': False, 'error': result.stderr or 'Installation failed'}
+
+ return {'success': True, 'message': 'Roundcube webmail installed'}
+ except Exception as e:
+ return {'success': False, 'error': str(e)}
+
+ # ==========================================
+ # MAIL LOGS
+ # ==========================================
+
+ @classmethod
+ def get_mail_log(cls, lines: int = 100) -> Dict:
+ """Get recent mail log entries."""
+ log_paths = ['/var/log/mail.log', '/var/log/maillog']
+
+ for log_path in log_paths:
+ if os.path.exists(log_path):
+ try:
+ r = run_privileged(['tail', '-n', str(lines), log_path])
+ if r.returncode == 0:
+ log_lines = [l for l in r.stdout.split('\n') if l.strip()]
+ return {'success': True, 'lines': log_lines,
+ 'source': log_path}
+ except Exception:
+ continue
+
+ # Try journalctl fallback
+ try:
+ r = run_privileged(['journalctl', '-u', 'postfix', '-n', str(lines), '--no-pager'])
+ if r.returncode == 0:
+ log_lines = [l for l in r.stdout.split('\n') if l.strip()]
+ return {'success': True, 'lines': log_lines,
+ 'source': 'journalctl'}
+ except Exception:
+ pass
+
+ return {'success': True, 'lines': [], 'source': None}
diff --git a/frontend/index.html b/frontend/index.html
index 4db11b3..c1fcdfa 100644
--- a/frontend/index.html
+++ b/frontend/index.html
@@ -10,10 +10,22 @@
ServerKit
diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index cc58bfd..bbddce4 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -35,6 +35,7 @@ import WordPressDetail from './pages/WordPressDetail';
import WordPressProjects from './pages/WordPressProjects';
import WordPressProject from './pages/WordPressProject';
import SSLCertificates from './pages/SSLCertificates';
+import Email from './pages/Email';
// Page title mapping
const PAGE_TITLES = {
@@ -60,6 +61,7 @@ const PAGE_TITLES = {
'/backups': 'Backups',
'/cron': 'Cron Jobs',
'/security': 'Security',
+ '/email': 'Email Server',
'/terminal': 'Terminal',
'/settings': 'Settings',
};
@@ -197,6 +199,8 @@ function AppRoutes() {
} />
} />
} />
+ } />
+ } />
} />
} />
} />
diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx
index dd93a74..ce6ccef 100644
--- a/frontend/src/components/Sidebar.jsx
+++ b/frontend/src/components/Sidebar.jsx
@@ -250,6 +250,13 @@ const Sidebar = () => {
Security
+ `nav-item ${isActive ? 'active' : ''}`}>
+
+
+
+
+ Email Server
+
System
diff --git a/frontend/src/contexts/ThemeContext.jsx b/frontend/src/contexts/ThemeContext.jsx
index 6b9daea..40461d2 100644
--- a/frontend/src/contexts/ThemeContext.jsx
+++ b/frontend/src/contexts/ThemeContext.jsx
@@ -2,6 +2,8 @@ import React, { createContext, useContext, useState, useEffect, useCallback } fr
const ThemeContext = createContext(null);
+const DEFAULT_ACCENT = '#6366f1';
+
// Get the resolved theme based on current setting and OS preference
function getResolvedTheme(theme) {
if (theme === 'system') {
@@ -10,6 +12,40 @@ function getResolvedTheme(theme) {
return theme;
}
+// Convert hex to { r, g, b }
+function hexToRgb(hex) {
+ const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
+ if (!result) return { r: 99, g: 102, b: 241 };
+ return {
+ r: parseInt(result[1], 16),
+ g: parseInt(result[2], 16),
+ b: parseInt(result[3], 16),
+ };
+}
+
+// Derive accent color variants from a hex color
+function deriveAccentVariants(hex) {
+ const { r, g, b } = hexToRgb(hex);
+ // Darken by ~12% for hover
+ const darken = (v) => Math.max(0, Math.round(v * 0.88));
+ return {
+ primary: hex,
+ hover: `#${darken(r).toString(16).padStart(2, '0')}${darken(g).toString(16).padStart(2, '0')}${darken(b).toString(16).padStart(2, '0')}`,
+ glow: `rgba(${r}, ${g}, ${b}, 0.15)`,
+ shadow: `rgba(${r}, ${g}, ${b}, 0.3)`,
+ };
+}
+
+// Apply accent CSS custom properties to the document
+function applyAccentToDOM(hex) {
+ const variants = deriveAccentVariants(hex);
+ const style = document.documentElement.style;
+ style.setProperty('--accent-primary', variants.primary);
+ style.setProperty('--accent-hover', variants.hover);
+ style.setProperty('--accent-glow', variants.glow);
+ style.setProperty('--accent-shadow', variants.shadow);
+}
+
export function ThemeProvider({ children }) {
const [theme, setThemeState] = useState(() => {
return localStorage.getItem('theme') || 'dark';
@@ -20,6 +56,10 @@ export function ThemeProvider({ children }) {
return getResolvedTheme(stored);
});
+ const [accentColor, setAccentColorState] = useState(() => {
+ return localStorage.getItem('accent_color') || DEFAULT_ACCENT;
+ });
+
// Update the DOM attribute and resolved theme
const applyTheme = useCallback((newTheme) => {
document.documentElement.setAttribute('data-theme', newTheme);
@@ -33,6 +73,13 @@ export function ThemeProvider({ children }) {
applyTheme(newTheme);
}, [applyTheme]);
+ // Public setter for accent color
+ const setAccentColor = useCallback((hex) => {
+ setAccentColorState(hex);
+ localStorage.setItem('accent_color', hex);
+ applyAccentToDOM(hex);
+ }, []);
+
// Listen for OS theme changes when using 'system' theme
useEffect(() => {
if (theme !== 'system') return;
@@ -47,15 +94,18 @@ export function ThemeProvider({ children }) {
return () => mediaQuery.removeEventListener('change', handleChange);
}, [theme]);
- // Apply theme on mount (handles cases where FOUC script didn't run)
+ // Apply theme and accent on mount
useEffect(() => {
applyTheme(theme);
- }, [theme, applyTheme]);
+ applyAccentToDOM(accentColor);
+ }, [theme, applyTheme, accentColor]);
const value = {
theme, // Current setting: 'dark' | 'light' | 'system'
resolvedTheme, // Actual appearance: 'dark' | 'light'
setTheme, // Function to change theme
+ accentColor, // Current accent hex color
+ setAccentColor, // Function to change accent color
};
return (
diff --git a/frontend/src/hooks/useDashboardLayout.js b/frontend/src/hooks/useDashboardLayout.js
new file mode 100644
index 0000000..2a94b54
--- /dev/null
+++ b/frontend/src/hooks/useDashboardLayout.js
@@ -0,0 +1,85 @@
+import { useState, useCallback } from 'react';
+
+const STORAGE_KEY = 'dashboard_layout';
+
+const DEFAULT_WIDGETS = [
+ { id: 'cpu', label: 'CPU', visible: true },
+ { id: 'ram', label: 'RAM', visible: true },
+ { id: 'network', label: 'Network', visible: true },
+ { id: 'disk', label: 'Disk', visible: true },
+ { id: 'chart', label: 'Metrics Chart', visible: true },
+ { id: 'specs', label: 'Quick Actions & Specs', visible: true },
+ { id: 'processes', label: 'Processes / Containers', visible: true },
+];
+
+function loadWidgets() {
+ try {
+ const stored = JSON.parse(localStorage.getItem(STORAGE_KEY));
+ if (!Array.isArray(stored)) return DEFAULT_WIDGETS.map(w => ({ ...w }));
+
+ // Merge with defaults to handle new widgets added in future versions
+ const storedMap = new Map(stored.map(w => [w.id, w]));
+ const merged = [];
+
+ // Keep stored order for known widgets
+ for (const sw of stored) {
+ const def = DEFAULT_WIDGETS.find(d => d.id === sw.id);
+ if (def) {
+ merged.push({ ...def, visible: sw.visible });
+ }
+ }
+
+ // Append any new defaults not in stored
+ for (const dw of DEFAULT_WIDGETS) {
+ if (!storedMap.has(dw.id)) {
+ merged.push({ ...dw });
+ }
+ }
+
+ return merged;
+ } catch {
+ return DEFAULT_WIDGETS.map(w => ({ ...w }));
+ }
+}
+
+function saveWidgets(widgets) {
+ localStorage.setItem(STORAGE_KEY, JSON.stringify(widgets.map(({ id, visible }) => ({ id, visible }))));
+}
+
+export default function useDashboardLayout() {
+ const [widgets, setWidgets] = useState(loadWidgets);
+
+ const toggleWidget = useCallback((id) => {
+ setWidgets(prev => {
+ const next = prev.map(w => w.id === id ? { ...w, visible: !w.visible } : w);
+ saveWidgets(next);
+ return next;
+ });
+ }, []);
+
+ const moveWidget = useCallback((id, direction) => {
+ setWidgets(prev => {
+ const idx = prev.findIndex(w => w.id === id);
+ if (idx < 0) return prev;
+ const swapIdx = direction === 'up' ? idx - 1 : idx + 1;
+ if (swapIdx < 0 || swapIdx >= prev.length) return prev;
+ const next = [...prev];
+ [next[idx], next[swapIdx]] = [next[swapIdx], next[idx]];
+ saveWidgets(next);
+ return next;
+ });
+ }, []);
+
+ const resetLayout = useCallback(() => {
+ const fresh = DEFAULT_WIDGETS.map(w => ({ ...w }));
+ saveWidgets(fresh);
+ setWidgets(fresh);
+ }, []);
+
+ const isVisible = useCallback((id) => {
+ const w = widgets.find(w => w.id === id);
+ return w ? w.visible : true;
+ }, [widgets]);
+
+ return { widgets, toggleWidget, moveWidget, resetLayout, isVisible };
+}
diff --git a/frontend/src/pages/Dashboard.jsx b/frontend/src/pages/Dashboard.jsx
index f1799f4..7124167 100644
--- a/frontend/src/pages/Dashboard.jsx
+++ b/frontend/src/pages/Dashboard.jsx
@@ -8,6 +8,7 @@ import {
import api from '../services/api';
import { useMetrics } from '../hooks/useMetrics';
import MetricsGraph from '../components/MetricsGraph';
+import useDashboardLayout from '../hooks/useDashboardLayout';
// Refresh interval options in seconds
const REFRESH_OPTIONS = [
@@ -21,6 +22,7 @@ const REFRESH_OPTIONS = [
const Dashboard = () => {
const navigate = useNavigate();
const { metrics, loading: metricsLoading, connected, refresh: refreshMetrics } = useMetrics(true);
+ const { widgets } = useDashboardLayout();
const [apps, setApps] = useState([]);
const [services, setServices] = useState([]);
const [dbStatus, setDbStatus] = useState(null);
@@ -218,147 +220,156 @@ const Dashboard = () => {
{/* Grid Container */}
- {/* Metric Tiles */}
-
-
- CPU
-
-
-
{(metrics?.cpu?.percent || 0).toFixed(1)}%
-
- Cores: {metrics?.cpu?.count_logical || 0}
- 50 ? 'trend-up' : 'trend-down'}>
- {metrics?.cpu?.percent > 50 ? '▲' : '▼'} {Math.abs(metrics?.cpu?.percent - 50).toFixed(0)}%
-
-
-
-
-
-
- RAM
-
-
-
{metrics?.memory?.ram?.used_human || '0 GB'}
-
- Total: {metrics?.memory?.ram?.total_human || '0 GB'}
- Cached: {metrics?.memory?.ram?.cached_human || '0 GB'}
-
-
+ {widgets.filter(w => w.visible).map(w => {
+ const WIDGET_RENDERERS = {
+ cpu: () => (
+
+
+ CPU
+
+
+
{(metrics?.cpu?.percent || 0).toFixed(1)}%
+
+ Cores: {metrics?.cpu?.count_logical || 0}
+ 50 ? 'trend-up' : 'trend-down'}>
+ {metrics?.cpu?.percent > 50 ? '▲' : '▼'} {Math.abs(metrics?.cpu?.percent - 50).toFixed(0)}%
+
+
+
+ ),
+ ram: () => (
+
+
+ RAM
+
+
+
{metrics?.memory?.ram?.used_human || '0 GB'}
+
+ Total: {metrics?.memory?.ram?.total_human || '0 GB'}
+ Cached: {metrics?.memory?.ram?.cached_human || '0 GB'}
+
+
+ ),
+ network: () => (
+
+
+
+ {metrics?.network?.io?.bytes_sent_human || '0 B'}
+ sent
+
+
+ In: {metrics?.network?.io?.bytes_recv_human || '0 B'}
+ Out: {metrics?.network?.io?.bytes_sent_human || '0 B'}
+
+
+ ),
+ disk: () => (
+
+
+ Disk
+
+
+
+ {(metrics?.disk?.partitions?.[0]?.percent || 0).toFixed(1)}%
+ used
+
+
+ Used: {metrics?.disk?.partitions?.[0]?.used_human || '0 GB'}
+ Free: {metrics?.disk?.partitions?.[0]?.free_human || '0 GB'}
+
+
+ ),
+ chart: () => (
+
+
+
+ ),
+ specs: () => (
+
+
Quick Actions
+
navigate('/docker')}>
+ Restart Services
+ ►
+
+
navigate('/databases')}>
+ Clear Cache
+
+
+
navigate('/ssl')}>
+ Rotate SSL Certs
+
+
-
-
-
- {metrics?.network?.io?.bytes_sent_human || '0 B'}
- sent
-
-
- In: {metrics?.network?.io?.bytes_recv_human || '0 B'}
- Out: {metrics?.network?.io?.bytes_sent_human || '0 B'}
-
-
-
-
-
- Disk
-
-
-
- {(metrics?.disk?.partitions?.[0]?.percent || 0).toFixed(1)}%
- used
-
-
- Used: {metrics?.disk?.partitions?.[0]?.used_human || '0 GB'}
- Free: {metrics?.disk?.partitions?.[0]?.free_human || '0 GB'}
-
-
-
- {/* Chart Panel */}
-
-
-
-
- {/* Spec Panel */}
-
-
Quick Actions
-
navigate('/docker')}>
- Restart Services
- ►
-
-
navigate('/databases')}>
- Clear Cache
-
-
-
navigate('/ssl')}>
- Rotate SSL Certs
-
-
-
-
Hardware Specs
-
- Processor
- {systemInfo?.cpu?.model || 'N/A'}
-
-
- Architecture
- {systemInfo?.cpu?.architecture || 'N/A'}
-
-
- Swap Memory
- {metrics?.memory?.swap?.total_human || 'N/A'}
-
-
-
- {/* Process Table */}
-
-
- Active Processes / Containers
-
-
-
-
-
-
-
- ID
- Name
- Type
- Status
- Domain
-
-
-
- {apps.length === 0 ? (
-
-
- No applications found
-
-
- ) : (
- apps.slice(0, 6).map(app => (
- navigate(`/apps/${app.id}`)} style={{ cursor: 'pointer' }}>
- {app.id}
-
-
- {getStackIcon(app.app_type)}
- {app.name}
-
-
- {app.app_type}
-
-
- {app.status?.toUpperCase()}
-
-
- {app.domains?.[0]?.name || '-'}
-
- ))
- )}
-
-
-
+
Hardware Specs
+
+ Processor
+ {systemInfo?.cpu?.model || 'N/A'}
+
+
+ Architecture
+ {systemInfo?.cpu?.architecture || 'N/A'}
+
+
+ Swap Memory
+ {metrics?.memory?.swap?.total_human || 'N/A'}
+
+
+ ),
+ processes: () => (
+
+
+ Active Processes / Containers
+
+
+
+
+
+
+
+ ID
+ Name
+ Type
+ Status
+ Domain
+
+
+
+ {apps.length === 0 ? (
+
+
+ No applications found
+
+
+ ) : (
+ apps.slice(0, 6).map(app => (
+ navigate(`/apps/${app.id}`)} style={{ cursor: 'pointer' }}>
+ {app.id}
+
+
+ {getStackIcon(app.app_type)}
+ {app.name}
+
+
+ {app.app_type}
+
+
+ {app.status?.toUpperCase()}
+
+
+ {app.domains?.[0]?.name || '-'}
+
+ ))
+ )}
+
+
+
+ ),
+ };
+ return WIDGET_RENDERERS[w.id]?.();
+ })}
);
diff --git a/frontend/src/pages/Email.jsx b/frontend/src/pages/Email.jsx
new file mode 100644
index 0000000..2b81d02
--- /dev/null
+++ b/frontend/src/pages/Email.jsx
@@ -0,0 +1,1164 @@
+import { useState, useEffect } from 'react';
+import useTabParam from '../hooks/useTabParam';
+import api from '../services/api';
+import { useToast } from '../contexts/ToastContext';
+
+const VALID_TABS = ['overview', 'accounts', 'postfix', 'dovecot', 'spam', 'authentication', 'queue', 'webmail', 'logs'];
+
+const Email = () => {
+ const [activeTab, setActiveTab] = useTabParam('/email', VALID_TABS);
+ const [status, setStatus] = useState(null);
+ const [loading, setLoading] = useState(true);
+
+ useEffect(() => {
+ loadStatus();
+ }, []);
+
+ async function loadStatus() {
+ try {
+ const data = await api.getEmailStatus();
+ setStatus(data);
+ } catch (err) {
+ console.error('Failed to load email status:', err);
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ if (loading) {
+ return
Loading email server status...
;
+ }
+
+ return (
+
+
+
+
Email Server
+
Manage Postfix, Dovecot, spam filtering, and email authentication
+
+
+
+
+ setActiveTab('overview')}>
+ Overview
+
+ setActiveTab('accounts')}>
+ Accounts
+
+ setActiveTab('postfix')}>
+ Postfix (SMTP)
+
+ setActiveTab('dovecot')}>
+ Dovecot (IMAP)
+
+ setActiveTab('spam')}>
+ Spam Filter
+
+ setActiveTab('authentication')}>
+ DKIM/SPF/DMARC
+
+ setActiveTab('queue')}>
+ Mail Queue
+
+ setActiveTab('webmail')}>
+ Webmail
+
+ setActiveTab('logs')}>
+ Logs
+
+
+
+
+ {activeTab === 'overview' &&
}
+ {activeTab === 'accounts' &&
}
+ {activeTab === 'postfix' &&
}
+ {activeTab === 'dovecot' &&
}
+ {activeTab === 'spam' &&
}
+ {activeTab === 'authentication' &&
}
+ {activeTab === 'queue' &&
}
+ {activeTab === 'webmail' &&
}
+ {activeTab === 'logs' &&
}
+
+
+ );
+};
+
+
+// ==========================================
+// OVERVIEW TAB
+// ==========================================
+
+const OverviewTab = ({ status, onRefresh }) => {
+ const { showToast } = useToast();
+
+ const services = [
+ { key: 'postfix', label: 'Postfix', desc: 'SMTP mail transfer agent', data: status?.postfix },
+ { key: 'dovecot', label: 'Dovecot', desc: 'IMAP/POP3 server', data: status?.dovecot },
+ { key: 'spamassassin', label: 'SpamAssassin', desc: 'Spam filtering engine', data: status?.spamassassin },
+ { key: 'opendkim', label: 'OpenDKIM', desc: 'DKIM email signing', data: status?.opendkim },
+ ];
+
+ async function handleServiceAction(service, action) {
+ try {
+ if (action === 'start') await api.startEmailService(service);
+ else if (action === 'stop') await api.stopEmailService(service);
+ else if (action === 'restart') await api.restartEmailService(service);
+ showToast(`${service} ${action}ed successfully`, 'success');
+ onRefresh();
+ } catch (err) {
+ showToast(err.message || `Failed to ${action} ${service}`, 'error');
+ }
+ }
+
+ return (
+
+
+ {services.map(svc => (
+
+
+
+
+
{svc.label}
+
{svc.desc}
+
+
+ {svc.data?.running ? 'Running' : svc.data?.installed ? 'Stopped' : 'Not Installed'}
+
+
+
+ {svc.data?.version && (
+
+ Version: {svc.data.version}
+
+ )}
+
+ {svc.data?.installed && (
+
+ {svc.data.running ? (
+ <>
+ handleServiceAction(svc.key, 'restart')}>
+ Restart
+
+ handleServiceAction(svc.key, 'stop')}>
+ Stop
+
+ >
+ ) : (
+ handleServiceAction(svc.key, 'start')}>
+ Start
+
+ )}
+
+ )}
+
+
+ ))}
+
+
+ );
+};
+
+
+// ==========================================
+// ACCOUNTS TAB
+// ==========================================
+
+const AccountsTab = () => {
+ const { showToast } = useToast();
+ const [accounts, setAccounts] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [showCreate, setShowCreate] = useState(false);
+ const [editingForward, setEditingForward] = useState(null);
+ const [formData, setFormData] = useState({ email: '', password: '', domain: '', quota_mb: 1024 });
+ const [forwardData, setForwardData] = useState({ forward_to: '', keep_copy: true });
+
+ useEffect(() => {
+ loadAccounts();
+ }, []);
+
+ async function loadAccounts() {
+ try {
+ const data = await api.getEmailAccounts();
+ setAccounts(data.accounts || []);
+ } catch (err) {
+ showToast('Failed to load accounts', 'error');
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ async function handleCreate(e) {
+ e.preventDefault();
+ try {
+ const result = await api.createEmailAccount(formData);
+ if (result.success) {
+ showToast('Account created', 'success');
+ setShowCreate(false);
+ setFormData({ email: '', password: '', domain: '', quota_mb: 1024 });
+ loadAccounts();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ async function handleDelete(accountId) {
+ if (!confirm('Delete this email account? This cannot be undone.')) return;
+ try {
+ const result = await api.deleteEmailAccount(accountId);
+ if (result.success) {
+ showToast('Account deleted', 'success');
+ loadAccounts();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ async function handleToggle(account) {
+ try {
+ await api.updateEmailAccount(account.id, { enabled: !account.enabled });
+ showToast(`Account ${account.enabled ? 'disabled' : 'enabled'}`, 'success');
+ loadAccounts();
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ async function handleForwarding(e) {
+ e.preventDefault();
+ try {
+ const result = await api.setEmailForwarding(editingForward, forwardData);
+ if (result.success) {
+ showToast('Forwarding updated', 'success');
+ setEditingForward(null);
+ loadAccounts();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ if (loading) return
Loading accounts...
;
+
+ return (
+
+
+
Email Accounts
+ setShowCreate(!showCreate)}>
+ {showCreate ? 'Cancel' : 'Create Account'}
+
+
+
+ {showCreate && (
+
+ )}
+
+ {accounts.length === 0 ? (
+
+
+
No email accounts configured yet.
+
+
+ ) : (
+
+
+
+
+ Email
+ Domain
+ Quota
+ Forwarding
+ Status
+ Actions
+
+
+
+ {accounts.map(account => (
+
+ {account.email}
+ {account.domain}
+ {account.quota_mb} MB
+ {account.forward_to || 'None'}
+
+
+ {account.enabled ? 'Active' : 'Disabled'}
+
+
+
+
+ handleToggle(account)}
+ >
+ {account.enabled ? 'Disable' : 'Enable'}
+
+ {
+ setEditingForward(account.id);
+ setForwardData({
+ forward_to: account.forward_to || '',
+ keep_copy: account.forward_keep_copy !== false,
+ });
+ }}
+ >
+ Forward
+
+ handleDelete(account.id)}
+ >
+ Delete
+
+
+
+
+ ))}
+
+
+
+ )}
+
+ {editingForward && (
+
setEditingForward(null)}>
+
e.stopPropagation()}>
+
+
Email Forwarding
+ setEditingForward(null)}>×
+
+
+
+
+ )}
+
+ );
+};
+
+
+// ==========================================
+// POSTFIX TAB
+// ==========================================
+
+const PostfixTab = ({ status, onRefresh }) => {
+ const { showToast } = useToast();
+ const [config, setConfig] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [installing, setInstalling] = useState(false);
+
+ useEffect(() => {
+ if (status?.postfix?.installed) loadConfig();
+ else setLoading(false);
+ }, [status]);
+
+ async function loadConfig() {
+ try {
+ const data = await api.getPostfixConfig();
+ setConfig(data.config || {});
+ } catch (err) {
+ console.error('Failed to load Postfix config:', err);
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ async function handleInstall() {
+ setInstalling(true);
+ try {
+ const result = await api.installPostfix();
+ if (result.success) {
+ showToast('Postfix installed', 'success');
+ onRefresh();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ } finally {
+ setInstalling(false);
+ }
+ }
+
+ async function handleSave() {
+ try {
+ const result = await api.updatePostfixConfig(config);
+ if (result.success) {
+ showToast('Configuration saved', 'success');
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ if (loading) return
Loading...
;
+
+ if (!status?.postfix?.installed) {
+ return (
+
+
+
Postfix Not Installed
+
Postfix is a high-performance mail transfer agent (MTA) used for sending and receiving email.
+
+ {installing ? 'Installing...' : 'Install Postfix'}
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
Postfix Configuration
+
+
+ {config && (
+
+ {Object.entries(config).map(([key, value]) => (
+
+ {key}
+ setConfig({ ...config, [key]: e.target.value })}
+ />
+
+ ))}
+
+ )}
+
+ Save Configuration
+
+
+
+
+ );
+};
+
+
+// ==========================================
+// DOVECOT TAB
+// ==========================================
+
+const DovecotTab = ({ status, onRefresh }) => {
+ const { showToast } = useToast();
+ const [config, setConfig] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [installing, setInstalling] = useState(false);
+
+ useEffect(() => {
+ if (status?.dovecot?.installed) loadConfig();
+ else setLoading(false);
+ }, [status]);
+
+ async function loadConfig() {
+ try {
+ const data = await api.getDovecotConfig();
+ setConfig(data.config || {});
+ } catch (err) {
+ console.error('Failed to load Dovecot config:', err);
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ async function handleInstall() {
+ setInstalling(true);
+ try {
+ const result = await api.installDovecot();
+ if (result.success) {
+ showToast('Dovecot installed', 'success');
+ onRefresh();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ } finally {
+ setInstalling(false);
+ }
+ }
+
+ if (loading) return
Loading...
;
+
+ if (!status?.dovecot?.installed) {
+ return (
+
+
+
Dovecot Not Installed
+
Dovecot is an IMAP and POP3 server that allows email clients to access mailboxes.
+
+ {installing ? 'Installing...' : 'Install Dovecot'}
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
Dovecot Configuration
+
+ {status?.dovecot?.running ? 'Running' : 'Stopped'}
+
+
+
+ {config && Object.keys(config).length > 0 ? (
+
+ {Object.entries(config).map(([key, value]) => (
+
+ {key}
+ {value}
+
+ ))}
+
+ ) : (
+
Using default Dovecot configuration.
+ )}
+
+
+
+ );
+};
+
+
+// ==========================================
+// SPAM FILTER TAB
+// ==========================================
+
+const SpamTab = ({ status, onRefresh }) => {
+ const { showToast } = useToast();
+ const [config, setConfig] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [installing, setInstalling] = useState(false);
+
+ useEffect(() => {
+ if (status?.spamassassin?.installed) loadConfig();
+ else setLoading(false);
+ }, [status]);
+
+ async function loadConfig() {
+ try {
+ const data = await api.getSpamAssassinConfig();
+ setConfig(data.config || {});
+ } catch (err) {
+ console.error('Failed to load SpamAssassin config:', err);
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ async function handleInstall() {
+ setInstalling(true);
+ try {
+ const result = await api.installSpamAssassin();
+ if (result.success) {
+ showToast('SpamAssassin installed', 'success');
+ onRefresh();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ } finally {
+ setInstalling(false);
+ }
+ }
+
+ async function handleSave() {
+ try {
+ const result = await api.updateSpamAssassinConfig(config);
+ if (result.success) {
+ showToast('SpamAssassin configuration saved', 'success');
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ if (loading) return
Loading...
;
+
+ if (!status?.spamassassin?.installed) {
+ return (
+
+
+
SpamAssassin Not Installed
+
SpamAssassin is a mail filter that identifies spam using content analysis and DNS blocklists.
+
+ {installing ? 'Installing...' : 'Install SpamAssassin'}
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
SpamAssassin Configuration
+
+
+ {config && (
+
+ )}
+
+ Save Configuration
+
+
+
+
+ );
+};
+
+
+// ==========================================
+// AUTHENTICATION TAB (DKIM/SPF/DMARC)
+// ==========================================
+
+const AuthenticationTab = ({ status, onRefresh }) => {
+ const { showToast } = useToast();
+ const [domain, setDomain] = useState('');
+ const [selector, setSelector] = useState('mail');
+ const [dnsRecords, setDnsRecords] = useState(null);
+ const [installing, setInstalling] = useState(false);
+ const [generating, setGenerating] = useState(false);
+
+ async function handleInstallDkim() {
+ setInstalling(true);
+ try {
+ const result = await api.installDkim();
+ if (result.success) {
+ showToast('OpenDKIM installed', 'success');
+ onRefresh();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ } finally {
+ setInstalling(false);
+ }
+ }
+
+ async function handleGenerateKey() {
+ if (!domain) {
+ showToast('Please enter a domain', 'error');
+ return;
+ }
+ setGenerating(true);
+ try {
+ const result = await api.generateDkimKey({ domain, selector });
+ if (result.success) {
+ showToast('DKIM key generated', 'success');
+ loadDnsRecords();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ } finally {
+ setGenerating(false);
+ }
+ }
+
+ async function loadDnsRecords() {
+ if (!domain) return;
+ try {
+ const data = await api.getEmailDnsRecords(domain);
+ setDnsRecords(data.records || []);
+ } catch (err) {
+ showToast('Failed to load DNS records', 'error');
+ }
+ }
+
+ return (
+
+ {!status?.opendkim?.installed && (
+
+
+
OpenDKIM Not Installed
+
DKIM signs outgoing emails to prove they were sent from your server and have not been tampered with.
+
+ {installing ? 'Installing...' : 'Install OpenDKIM'}
+
+
+
+ )}
+
+
+
+
Generate DKIM Key
+
+
+
+
+
+ {generating ? 'Generating...' : 'Generate Key'}
+
+
+ Show DNS Records
+
+
+
+
+
+ {dnsRecords && dnsRecords.length > 0 && (
+
+
+
Required DNS Records
+
+
+
+ Add these records to your DNS settings for email authentication.
+
+
+ {dnsRecords.map((record, i) => (
+
+
+ {record.type}
+ {record.purpose}
+
+
+
+ Name:
+ {record.name}
+
+
+ Value:
+ {record.value}
+
+
+
+ ))}
+
+
+
+ )}
+
+ );
+};
+
+
+// ==========================================
+// MAIL QUEUE TAB
+// ==========================================
+
+const QueueTab = () => {
+ const { showToast } = useToast();
+ const [queue, setQueue] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [count, setCount] = useState(0);
+
+ useEffect(() => {
+ loadQueue();
+ }, []);
+
+ async function loadQueue() {
+ setLoading(true);
+ try {
+ const data = await api.getMailQueue();
+ setQueue(data.queue || []);
+ setCount(data.count || 0);
+ } catch (err) {
+ showToast('Failed to load mail queue', 'error');
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ async function handleFlush() {
+ try {
+ await api.flushMailQueue();
+ showToast('Queue flushed', 'success');
+ loadQueue();
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ async function handleDelete(queueId) {
+ try {
+ await api.deleteQueuedMessage(queueId);
+ showToast('Message deleted', 'success');
+ loadQueue();
+ } catch (err) {
+ showToast(err.message, 'error');
+ }
+ }
+
+ if (loading) return
Loading mail queue...
;
+
+ return (
+
+
+
Mail Queue ({count} messages)
+
+ Refresh
+
+ Flush Queue
+
+
+
+
+ {queue.length === 0 ? (
+
+ ) : (
+
+
+
+
+ Queue ID
+ Size
+ Date
+ Sender
+ Recipients
+ Actions
+
+
+
+ {queue.map(item => (
+
+ {item.id}
+ {item.size} B
+ {item.date}
+ {item.sender}
+ {(item.recipients || []).join(', ')}
+
+ handleDelete(item.id)}
+ >
+ Delete
+
+
+
+ ))}
+
+
+
+ )}
+
+ );
+};
+
+
+// ==========================================
+// WEBMAIL TAB
+// ==========================================
+
+const WebmailTab = () => {
+ const { showToast } = useToast();
+ const [status, setStatus] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [installing, setInstalling] = useState(false);
+
+ useEffect(() => {
+ loadStatus();
+ }, []);
+
+ async function loadStatus() {
+ try {
+ const data = await api.getWebmailStatus();
+ setStatus(data);
+ } catch (err) {
+ console.error('Failed to load webmail status:', err);
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ async function handleInstall() {
+ setInstalling(true);
+ try {
+ const result = await api.installWebmail();
+ if (result.success) {
+ showToast('Roundcube installed', 'success');
+ loadStatus();
+ } else {
+ showToast(result.error, 'error');
+ }
+ } catch (err) {
+ showToast(err.message, 'error');
+ } finally {
+ setInstalling(false);
+ }
+ }
+
+ if (loading) return
Loading...
;
+
+ return (
+
+
+
+ {status?.installed ? (
+
+
+
+
Roundcube Webmail
+
Browser-based email client for your users
+
+
Installed
+
+ {status.url && (
+
+ Access webmail at: {status.url}
+
+ )}
+
+ ) : (
+
+
Roundcube Webmail
+
Roundcube provides a browser-based interface for users to read and send email.
+
+ {installing ? 'Installing...' : 'Install Roundcube'}
+
+
+ )}
+
+
+
+ );
+};
+
+
+// ==========================================
+// LOGS TAB
+// ==========================================
+
+const LogsTab = () => {
+ const [logs, setLogs] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [source, setSource] = useState(null);
+
+ useEffect(() => {
+ loadLogs();
+ }, []);
+
+ async function loadLogs() {
+ setLoading(true);
+ try {
+ const data = await api.getMailLogs(200);
+ setLogs(data.lines || []);
+ setSource(data.source);
+ } catch (err) {
+ console.error('Failed to load mail logs:', err);
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ if (loading) return
Loading logs...
;
+
+ return (
+
+
+
Mail Logs
+
+ {source && Source: {source} }
+ Refresh
+
+
+
+
+
+ {logs.length === 0 ? (
+
No mail logs available.
+ ) : (
+
{logs.join('\n')}
+ )}
+
+
+
+ );
+};
+
+export default Email;
diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx
index 0de38ae..614736c 100644
--- a/frontend/src/pages/Settings.jsx
+++ b/frontend/src/pages/Settings.jsx
@@ -2,6 +2,7 @@ import React, { useState, useEffect } from 'react';
import useTabParam from '../hooks/useTabParam';
import { useAuth } from '../contexts/AuthContext';
import { useTheme } from '../contexts/ThemeContext';
+import useDashboardLayout from '../hooks/useDashboardLayout';
import api from '../services/api';
import UsersTab from '../components/settings/UsersTab';
import AuditLogTab from '../components/settings/AuditLogTab';
@@ -784,8 +785,20 @@ Keep these codes in a safe place.`;
);
};
+const ACCENT_PRESETS = [
+ { label: 'Indigo', color: '#6366f1' },
+ { label: 'Ocean', color: '#0ea5e9' },
+ { label: 'Forest', color: '#10b981' },
+ { label: 'Sunset', color: '#f97316' },
+ { label: 'Rose', color: '#f43f5e' },
+ { label: 'Violet', color: '#8b5cf6' },
+ { label: 'Amber', color: '#f59e0b' },
+ { label: 'Cyan', color: '#06b6d4' },
+];
+
const AppearanceSettings = () => {
- const { theme, setTheme } = useTheme();
+ const { theme, setTheme, accentColor, setAccentColor } = useTheme();
+ const { widgets, toggleWidget, moveWidget, resetLayout } = useDashboardLayout();
return (
@@ -839,6 +852,79 @@ const AppearanceSettings = () => {
+
+
+
Accent Color
+
Choose the primary accent color used across the interface
+
+ {ACCENT_PRESETS.map(({ label, color }) => (
+ setAccentColor(color)}
+ >
+
+ {label}
+
+ ))}
+
+
+
Custom color
+
+ setAccentColor(e.target.value)}
+ />
+ {accentColor.toUpperCase()}
+
+
+
+
+
+
Dashboard Widgets
+
Toggle visibility and reorder widgets on the dashboard
+
+ {widgets.map((widget, idx) => (
+
+
+
+ toggleWidget(widget.id)}
+ />
+
+
+ {widget.label}
+
+
+ moveWidget(widget.id, 'up')}
+ disabled={idx === 0}
+ title="Move up"
+ >
+
+
+ moveWidget(widget.id, 'down')}
+ disabled={idx === widgets.length - 1}
+ title="Move down"
+ >
+
+
+
+
+ ))}
+
+
+
+ Reset to defaults
+
+
);
};
diff --git a/frontend/src/services/api.js b/frontend/src/services/api.js
index 59891f4..73147a1 100644
--- a/frontend/src/services/api.js
+++ b/frontend/src/services/api.js
@@ -2912,6 +2912,131 @@ class ApiService {
const baseUrl = this.baseUrl.replace('/api/v1', '');
return `${baseUrl}/api/servers/agent/download/${os}/${arch}`;
}
+
+ // ==========================================
+ // Email Server
+ // ==========================================
+
+ async getEmailStatus() {
+ return this.request('/email/status');
+ }
+
+ async getEmailConfig() {
+ return this.request('/email/config');
+ }
+
+ async updateEmailConfig(data) {
+ return this.request('/email/config', { method: 'PUT', body: data });
+ }
+
+ // Postfix
+ async installPostfix() {
+ return this.request('/email/postfix/install', { method: 'POST' });
+ }
+
+ async getPostfixConfig() {
+ return this.request('/email/postfix/config');
+ }
+
+ async updatePostfixConfig(data) {
+ return this.request('/email/postfix/config', { method: 'PUT', body: data });
+ }
+
+ // Mail Queue
+ async getMailQueue() {
+ return this.request('/email/queue');
+ }
+
+ async flushMailQueue() {
+ return this.request('/email/queue/flush', { method: 'POST' });
+ }
+
+ async deleteQueuedMessage(queueId) {
+ return this.request(`/email/queue/${queueId}`, { method: 'DELETE' });
+ }
+
+ // Dovecot
+ async installDovecot() {
+ return this.request('/email/dovecot/install', { method: 'POST' });
+ }
+
+ async getDovecotConfig() {
+ return this.request('/email/dovecot/config');
+ }
+
+ // Email Accounts
+ async getEmailAccounts() {
+ return this.request('/email/accounts');
+ }
+
+ async createEmailAccount(data) {
+ return this.request('/email/accounts', { method: 'POST', body: data });
+ }
+
+ async updateEmailAccount(accountId, data) {
+ return this.request(`/email/accounts/${accountId}`, { method: 'PUT', body: data });
+ }
+
+ async deleteEmailAccount(accountId) {
+ return this.request(`/email/accounts/${accountId}`, { method: 'DELETE' });
+ }
+
+ async setEmailForwarding(accountId, data) {
+ return this.request(`/email/accounts/${accountId}/forwarding`, { method: 'PUT', body: data });
+ }
+
+ // SpamAssassin
+ async installSpamAssassin() {
+ return this.request('/email/spamassassin/install', { method: 'POST' });
+ }
+
+ async getSpamAssassinConfig() {
+ return this.request('/email/spamassassin/config');
+ }
+
+ async updateSpamAssassinConfig(data) {
+ return this.request('/email/spamassassin/config', { method: 'PUT', body: data });
+ }
+
+ // DKIM
+ async installDkim() {
+ return this.request('/email/dkim/install', { method: 'POST' });
+ }
+
+ async generateDkimKey(data) {
+ return this.request('/email/dkim/generate', { method: 'POST', body: data });
+ }
+
+ async getEmailDnsRecords(domain) {
+ return this.request(`/email/dns/${domain}`);
+ }
+
+ // Service Control
+ async startEmailService(service) {
+ return this.request(`/email/services/${service}/start`, { method: 'POST' });
+ }
+
+ async stopEmailService(service) {
+ return this.request(`/email/services/${service}/stop`, { method: 'POST' });
+ }
+
+ async restartEmailService(service) {
+ return this.request(`/email/services/${service}/restart`, { method: 'POST' });
+ }
+
+ // Webmail
+ async getWebmailStatus() {
+ return this.request('/email/webmail/status');
+ }
+
+ async installWebmail() {
+ return this.request('/email/webmail/install', { method: 'POST' });
+ }
+
+ // Mail Logs
+ async getMailLogs(lines = 100) {
+ return this.request(`/email/logs?lines=${lines}`);
+ }
}
export const api = new ApiService();
diff --git a/frontend/src/styles/_theme-variables.less b/frontend/src/styles/_theme-variables.less
index f852c37..436ffbf 100644
--- a/frontend/src/styles/_theme-variables.less
+++ b/frontend/src/styles/_theme-variables.less
@@ -45,6 +45,12 @@
// Grid pattern
--grid-color: rgba(255, 255, 255, 0.02);
+
+ // Accent colors (overridden at runtime by ThemeContext)
+ --accent-primary: #6366f1;
+ --accent-hover: #4f46e5;
+ --accent-glow: rgba(99, 102, 241, 0.15);
+ --accent-shadow: rgba(99, 102, 241, 0.3);
}
// --------------------------------------------
diff --git a/frontend/src/styles/_variables.less b/frontend/src/styles/_variables.less
index c83473f..3d00cc8 100644
--- a/frontend/src/styles/_variables.less
+++ b/frontend/src/styles/_variables.less
@@ -60,10 +60,17 @@
// --------------------------------------------
// COLORS - Accent
// --------------------------------------------
-@accent-primary: #6366f1;
-@accent-hover: #4f46e5;
-@accent-glow: rgba(99, 102, 241, 0.15);
-@accent-shadow: rgba(99, 102, 241, 0.3);
+// Raw values for LESS compile-time functions (fade, darken, etc.)
+@accent-primary-raw: #6366f1;
+@accent-hover-raw: #4f46e5;
+@accent-glow-raw: rgba(99, 102, 241, 0.15);
+@accent-shadow-raw: rgba(99, 102, 241, 0.3);
+
+// Themed values using CSS custom properties (runtime-switchable)
+@accent-primary: var(--accent-primary);
+@accent-hover: var(--accent-hover);
+@accent-glow: var(--accent-glow);
+@accent-shadow: var(--accent-shadow);
// --------------------------------------------
// COLORS - Semantic
diff --git a/frontend/src/styles/components/_badges.less b/frontend/src/styles/components/_badges.less
index eb3a0d8..69b8d78 100644
--- a/frontend/src/styles/components/_badges.less
+++ b/frontend/src/styles/components/_badges.less
@@ -138,9 +138,9 @@
}
&.env-development {
- background: fade(@accent-primary, 15%);
+ background: fade(@accent-primary-raw, 15%);
color: @accent-primary;
- border-color: fade(@accent-primary, 30%);
+ border-color: fade(@accent-primary-raw, 30%);
}
&.env-staging {
diff --git a/frontend/src/styles/components/_build.less b/frontend/src/styles/components/_build.less
index 9f48d01..5e8c16e 100644
--- a/frontend/src/styles/components/_build.less
+++ b/frontend/src/styles/components/_build.less
@@ -86,7 +86,7 @@
// Current Deployment Card
.current-deployment {
- background: linear-gradient(135deg, fade(@success, 5%), fade(@accent-primary, 5%));
+ background: linear-gradient(135deg, fade(@success, 5%), fade(@accent-primary-raw, 5%));
border-color: fade(@success, 30%);
.deployment-current-info {
diff --git a/frontend/src/styles/components/_cards.less b/frontend/src/styles/components/_cards.less
index a10559b..91daf1f 100644
--- a/frontend/src/styles/components/_cards.less
+++ b/frontend/src/styles/components/_cards.less
@@ -111,11 +111,11 @@
}
// Default colors
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
color: @accent-primary;
&.backups {
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
color: @accent-primary;
}
diff --git a/frontend/src/styles/components/_env-vars.less b/frontend/src/styles/components/_env-vars.less
index 3042ea9..08f1068 100644
--- a/frontend/src/styles/components/_env-vars.less
+++ b/frontend/src/styles/components/_env-vars.less
@@ -74,7 +74,7 @@
}
&:has(input:checked) {
- background: fade(@accent-primary, 20%);
+ background: fade(@accent-primary-raw, 20%);
border-color: @accent-primary;
color: @accent-primary;
}
diff --git a/frontend/src/styles/components/_linked-apps.less b/frontend/src/styles/components/_linked-apps.less
index f1ce02a..14f539f 100644
--- a/frontend/src/styles/components/_linked-apps.less
+++ b/frontend/src/styles/components/_linked-apps.less
@@ -151,8 +151,8 @@
// Shared config info
.shared-config-info {
padding: @space-3;
- background: fade(@accent-primary, 10%);
- border: 1px solid fade(@accent-primary, 20%);
+ background: fade(@accent-primary-raw, 10%);
+ border: 1px solid fade(@accent-primary-raw, 20%);
border-radius: @radius-md;
}
diff --git a/frontend/src/styles/components/_modals.less b/frontend/src/styles/components/_modals.less
index f3c621a..df6e330 100644
--- a/frontend/src/styles/components/_modals.less
+++ b/frontend/src/styles/components/_modals.less
@@ -157,7 +157,7 @@
}
&-info {
- background: fade(@accent-primary, 15%);
+ background: fade(@accent-primary-raw, 15%);
color: @accent-primary;
}
}
diff --git a/frontend/src/styles/components/_notifications.less b/frontend/src/styles/components/_notifications.less
index 9362fed..0e86e8f 100644
--- a/frontend/src/styles/components/_notifications.less
+++ b/frontend/src/styles/components/_notifications.less
@@ -88,7 +88,7 @@
&:has(input:checked) {
background: @accent-glow;
- border-color: fade(@accent-primary, 40%);
+ border-color: fade(@accent-primary-raw, 40%);
span {
color: @text-primary;
@@ -240,7 +240,7 @@
}
&.info span {
- background: fade(@accent-primary, 20%);
+ background: fade(@accent-primary-raw, 20%);
color: @accent-primary;
}
diff --git a/frontend/src/styles/components/_query-runner.less b/frontend/src/styles/components/_query-runner.less
index 863e76c..92d819a 100644
--- a/frontend/src/styles/components/_query-runner.less
+++ b/frontend/src/styles/components/_query-runner.less
@@ -63,7 +63,7 @@
}
&.sqlite {
- background: fade(@accent-primary, 15%);
+ background: fade(@accent-primary-raw, 15%);
color: @accent-primary;
}
}
@@ -197,7 +197,7 @@
&.selected {
background: @accent-glow;
- border: 1px solid fade(@accent-primary, 30%);
+ border: 1px solid fade(@accent-primary-raw, 30%);
}
.table-name {
diff --git a/frontend/src/styles/components/_spinner.less b/frontend/src/styles/components/_spinner.less
index 8bbbdd9..643f76b 100644
--- a/frontend/src/styles/components/_spinner.less
+++ b/frontend/src/styles/components/_spinner.less
@@ -44,7 +44,7 @@
.spinner-ring {
border-radius: 50%;
border-style: solid;
- border-color: fade(@accent-primary, 30%);
+ border-color: fade(@accent-primary-raw, 30%);
border-top-color: @accent-primary;
animation: spinner-rotate 0.8s linear infinite;
}
diff --git a/frontend/src/styles/components/_toasts.less b/frontend/src/styles/components/_toasts.less
index 0ca592f..53e4d64 100644
--- a/frontend/src/styles/components/_toasts.less
+++ b/frontend/src/styles/components/_toasts.less
@@ -90,8 +90,8 @@
}
&-info {
- border-color: fade(@accent-primary, 30%);
- background: fade(@accent-primary, 10%);
+ border-color: fade(@accent-primary-raw, 30%);
+ background: fade(@accent-primary-raw, 10%);
.toast-icon {
color: @accent-primary;
diff --git a/frontend/src/styles/components/_two-factor.less b/frontend/src/styles/components/_two-factor.less
index fe20ab4..15da133 100644
--- a/frontend/src/styles/components/_two-factor.less
+++ b/frontend/src/styles/components/_two-factor.less
@@ -25,7 +25,7 @@
&:focus {
border-color: @accent-primary;
outline: none;
- box-shadow: 0 0 0 3px fade(@accent-primary, 20%);
+ box-shadow: 0 0 0 3px fade(@accent-primary-raw, 20%);
}
&::placeholder {
@@ -74,7 +74,7 @@
display: flex;
align-items: center;
justify-content: center;
- background: fade(@accent-primary, 15%);
+ background: fade(@accent-primary-raw, 15%);
border-radius: @radius-md;
color: @accent-primary;
flex-shrink: 0;
diff --git a/frontend/src/styles/components/_uptime.less b/frontend/src/styles/components/_uptime.less
index b07b3f7..b929ec4 100644
--- a/frontend/src/styles/components/_uptime.less
+++ b/frontend/src/styles/components/_uptime.less
@@ -141,7 +141,7 @@
justify-content: center;
gap: @space-2;
padding: @space-5;
- background: linear-gradient(135deg, fade(@accent-primary, 5%), fade(@success, 5%));
+ background: linear-gradient(135deg, fade(@accent-primary-raw, 5%), fade(@success, 5%));
border: 1px solid @border-subtle;
border-radius: @radius-lg;
margin-bottom: @space-6;
diff --git a/frontend/src/styles/components/_users.less b/frontend/src/styles/components/_users.less
index 1215065..eda0f84 100644
--- a/frontend/src/styles/components/_users.less
+++ b/frontend/src/styles/components/_users.less
@@ -13,7 +13,7 @@
display: flex;
gap: @space-4;
padding: @space-4;
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
border-radius: @radius-md;
margin-bottom: @space-6;
diff --git a/frontend/src/styles/main.less b/frontend/src/styles/main.less
index 9ba196c..7b5fbd8 100644
--- a/frontend/src/styles/main.less
+++ b/frontend/src/styles/main.less
@@ -78,4 +78,5 @@
@import 'pages/_wordpress';
@import 'pages/_wordpress-pipeline';
@import 'pages/_ssl';
+@import 'pages/_email';
@import 'pages/_setup-wizard';
diff --git a/frontend/src/styles/pages/_applications.less b/frontend/src/styles/pages/_applications.less
index 60001ef..ca0031d 100644
--- a/frontend/src/styles/pages/_applications.less
+++ b/frontend/src/styles/pages/_applications.less
@@ -177,7 +177,7 @@
align-items: center;
padding: @space-3 @space-4;
background: @accent-glow;
- border: 1px solid fade(@accent-primary, 30%);
+ border: 1px solid fade(@accent-primary-raw, 30%);
border-radius: @radius-lg;
gap: @space-4;
flex-wrap: wrap;
@@ -303,7 +303,7 @@
&.selected {
background: @accent-glow;
- border-color: fade(@accent-primary, 30%);
+ border-color: fade(@accent-primary-raw, 30%);
}
&:hover {
@@ -536,7 +536,7 @@
&.selected {
background: @accent-glow;
- border-color: fade(@accent-primary, 30%);
+ border-color: fade(@accent-primary-raw, 30%);
}
}
diff --git a/frontend/src/styles/pages/_backups.less b/frontend/src/styles/pages/_backups.less
index 8c4e6df..9f59922 100644
--- a/frontend/src/styles/pages/_backups.less
+++ b/frontend/src/styles/pages/_backups.less
@@ -11,7 +11,7 @@
}
&.apps {
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
color: @accent-primary;
}
diff --git a/frontend/src/styles/pages/_cron.less b/frontend/src/styles/pages/_cron.less
index 80f17b4..7f4a44d 100644
--- a/frontend/src/styles/pages/_cron.less
+++ b/frontend/src/styles/pages/_cron.less
@@ -33,7 +33,7 @@
// Stats icons
.stat-icon {
&.cron {
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
color: @accent-primary;
}
diff --git a/frontend/src/styles/pages/_email.less b/frontend/src/styles/pages/_email.less
new file mode 100644
index 0000000..632f8ac
--- /dev/null
+++ b/frontend/src/styles/pages/_email.less
@@ -0,0 +1,265 @@
+// ============================================
+// EMAIL SERVER PAGE STYLES
+// ============================================
+
+.email-page {
+ .tabs-nav {
+ display: flex;
+ gap: @space-2;
+ margin-bottom: @space-6;
+ border-bottom: 1px solid @border-subtle;
+ padding-bottom: @space-2;
+ overflow-x: auto;
+
+ .tab-btn {
+ padding: @space-2 @space-4;
+ border: none;
+ background: none;
+ color: @text-secondary;
+ cursor: pointer;
+ border-radius: @radius-md;
+ white-space: nowrap;
+ transition: all 0.2s;
+
+ &:hover {
+ background: @bg-hover;
+ color: @text-primary;
+ }
+
+ &.active {
+ background: @accent-primary;
+ color: white;
+ }
+ }
+ }
+}
+
+// Service overview cards
+.email-overview {
+ .services-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
+ gap: @space-4;
+ }
+}
+
+.email-service-card {
+ .card-body {
+ padding: @space-6;
+ }
+}
+
+.email-service-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ gap: @space-4;
+
+ h3 {
+ margin: 0 0 @space-1 0;
+ font-size: @font-size-md;
+ font-weight: @font-weight-semibold;
+ }
+
+ p {
+ margin: 0;
+ font-size: @font-size-sm;
+ }
+}
+
+.email-service-meta {
+ margin-top: @space-3;
+ font-size: @font-size-sm;
+}
+
+.email-service-actions {
+ display: flex;
+ gap: @space-2;
+ margin-top: @space-4;
+ padding-top: @space-4;
+ border-top: 1px solid @border-subtle;
+}
+
+// Accounts section
+.email-accounts {
+ .section-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: @space-4;
+
+ h2 {
+ margin: 0;
+ }
+ }
+}
+
+.email-create-form {
+ margin-bottom: @space-4;
+
+ .form-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
+ gap: @space-4;
+ }
+}
+
+// Postfix / Dovecot config
+.email-postfix,
+.email-dovecot,
+.email-spam {
+ .form-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
+ gap: @space-4;
+ }
+}
+
+// Config list (read-only)
+.config-list {
+ display: flex;
+ flex-direction: column;
+ gap: @space-2;
+}
+
+.config-item {
+ display: flex;
+ gap: @space-4;
+ padding: @space-2 0;
+ border-bottom: 1px solid @border-subtle;
+ font-size: @font-size-sm;
+
+ &:last-child {
+ border-bottom: none;
+ }
+
+ .config-key {
+ color: @text-secondary;
+ min-width: 200px;
+ font-family: @font-mono;
+ font-size: @font-size-xs;
+ }
+
+ .config-value {
+ color: @text-primary;
+ word-break: break-all;
+ }
+}
+
+// DNS records
+.dns-records-list {
+ display: flex;
+ flex-direction: column;
+ gap: @space-4;
+}
+
+.dns-record-item {
+ padding: @space-4;
+ background: @bg-elevated;
+ border-radius: @radius-md;
+ border: 1px solid @border-subtle;
+}
+
+.dns-record-header {
+ display: flex;
+ align-items: center;
+ gap: @space-3;
+ margin-bottom: @space-3;
+}
+
+.dns-record-purpose {
+ font-size: @font-size-sm;
+ color: @text-secondary;
+}
+
+.dns-record-details {
+ display: flex;
+ flex-direction: column;
+ gap: @space-2;
+}
+
+.dns-field {
+ display: flex;
+ align-items: flex-start;
+ gap: @space-2;
+
+ .dns-label {
+ font-size: @font-size-sm;
+ color: @text-tertiary;
+ min-width: 50px;
+ }
+
+ code {
+ font-family: @font-mono;
+ font-size: @font-size-xs;
+ color: @text-primary;
+ background: @bg-secondary;
+ padding: @space-1 @space-2;
+ border-radius: @radius-sm;
+ word-break: break-all;
+ }
+}
+
+// Mail queue
+.email-queue {
+ .section-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: @space-4;
+
+ h2 {
+ margin: 0;
+ }
+ }
+}
+
+// Webmail
+.email-webmail {
+ .webmail-status {
+ padding: @space-2;
+ }
+}
+
+// Logs
+.email-logs {
+ .section-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: @space-4;
+
+ h2 {
+ margin: 0;
+ }
+ }
+
+ .log-output {
+ font-family: @font-mono;
+ font-size: @font-size-xs;
+ line-height: @line-height-relaxed;
+ color: @text-primary;
+ background: @bg-body;
+ padding: @space-4;
+ border-radius: @radius-md;
+ border: 1px solid @border-subtle;
+ max-height: 600px;
+ overflow: auto;
+ white-space: pre-wrap;
+ word-break: break-all;
+ margin: 0;
+ }
+}
+
+// Authentication tab
+.email-authentication {
+ display: flex;
+ flex-direction: column;
+ gap: @space-4;
+
+ .form-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
+ gap: @space-4;
+ }
+}
diff --git a/frontend/src/styles/pages/_file-manager.less b/frontend/src/styles/pages/_file-manager.less
index bc912ff..6ea7640 100644
--- a/frontend/src/styles/pages/_file-manager.less
+++ b/frontend/src/styles/pages/_file-manager.less
@@ -380,7 +380,7 @@
&.active {
background: @accent-glow;
- border-color: fade(@accent-primary, 30%);
+ border-color: fade(@accent-primary-raw, 30%);
color: @accent-primary;
}
}
diff --git a/frontend/src/styles/pages/_git.less b/frontend/src/styles/pages/_git.less
index 3656356..1aa00f7 100644
--- a/frontend/src/styles/pages/_git.less
+++ b/frontend/src/styles/pages/_git.less
@@ -1068,7 +1068,7 @@
}
&.active {
- background: fade(@accent-primary, 5%);
+ background: fade(@accent-primary-raw, 5%);
}
.branch-info {
diff --git a/frontend/src/styles/pages/_monitoring.less b/frontend/src/styles/pages/_monitoring.less
index 1d458f6..1603951 100644
--- a/frontend/src/styles/pages/_monitoring.less
+++ b/frontend/src/styles/pages/_monitoring.less
@@ -85,7 +85,7 @@
}
&.interval {
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
color: @accent-primary;
}
diff --git a/frontend/src/styles/pages/_security.less b/frontend/src/styles/pages/_security.less
index 9c3e184..0ad68cb 100644
--- a/frontend/src/styles/pages/_security.less
+++ b/frontend/src/styles/pages/_security.less
@@ -68,8 +68,8 @@
}
&.info {
- border-color: fade(@accent-primary, 30%);
- .stat-icon { color: @accent-primary; background: fade(@accent-primary, 15%); }
+ border-color: fade(@accent-primary-raw, 30%);
+ .stat-icon { color: @accent-primary; background: fade(@accent-primary-raw, 15%); }
}
}
@@ -160,7 +160,7 @@
align-items: center;
justify-content: center;
border-radius: @radius-md;
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
color: @accent-primary;
margin-bottom: @space-3;
}
@@ -787,7 +787,7 @@
}
&.recommendations {
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
padding: @space-4;
border-radius: @radius-md;
diff --git a/frontend/src/styles/pages/_servers.less b/frontend/src/styles/pages/_servers.less
index 65ad2eb..79cf2c0 100644
--- a/frontend/src/styles/pages/_servers.less
+++ b/frontend/src/styles/pages/_servers.less
@@ -1185,7 +1185,7 @@
&.critical { background: fade(@danger, 15%); color: @danger; }
&.high { background: fade(@warning, 15%); color: @warning; }
- &.medium { background: fade(@accent-primary, 15%); color: @accent-primary; }
+ &.medium { background: fade(@accent-primary-raw, 15%); color: @accent-primary; }
&.low { background: @bg-hover; color: @text-tertiary; }
}
diff --git a/frontend/src/styles/pages/_settings.less b/frontend/src/styles/pages/_settings.less
index 47d9d86..508dcc4 100644
--- a/frontend/src/styles/pages/_settings.less
+++ b/frontend/src/styles/pages/_settings.less
@@ -316,7 +316,7 @@
&.active {
border-color: @accent-primary;
- background: fade(@accent-primary, 5%);
+ background: fade(@accent-primary-raw, 5%);
}
span {
@@ -482,7 +482,7 @@
}
&:focus + .toggle-slider {
- box-shadow: 0 0 0 2px fade(@accent-primary, 30%);
+ box-shadow: 0 0 0 2px fade(@accent-primary-raw, 30%);
}
}
@@ -696,8 +696,8 @@
// Star Prompt Card
.star-prompt-card {
position: relative;
- background: linear-gradient(135deg, @accent-glow 0%, fade(@accent-primary, 8%) 100%);
- border: 1px solid fade(@accent-primary, 30%);
+ background: linear-gradient(135deg, @accent-glow 0%, fade(@accent-primary-raw, 8%) 100%);
+ border: 1px solid fade(@accent-primary-raw, 30%);
border-radius: @radius-lg;
padding: @space-6;
display: flex;
@@ -974,3 +974,162 @@
font-weight: @font-weight-medium;
}
}
+
+// ============================================
+// ACCENT COLOR PRESETS
+// ============================================
+
+.accent-presets {
+ display: grid;
+ grid-template-columns: repeat(4, 1fr);
+ gap: @space-2;
+ margin-bottom: @space-4;
+
+ @media (max-width: @breakpoint-sm) {
+ grid-template-columns: repeat(2, 1fr);
+ }
+}
+
+.accent-preset {
+ display: flex;
+ align-items: center;
+ gap: @space-2;
+ padding: @space-2 @space-3;
+ background: transparent;
+ border: 2px solid @border-subtle;
+ border-radius: @radius-md;
+ cursor: pointer;
+ transition: all @transition-fast;
+
+ &:hover {
+ border-color: @border-active;
+ }
+
+ &.active {
+ border-color: @accent-primary;
+ background: @bg-hover;
+ }
+}
+
+.accent-swatch {
+ width: 20px;
+ height: 20px;
+ border-radius: @radius-full;
+ flex-shrink: 0;
+}
+
+.accent-label {
+ font-size: @font-size-sm;
+ font-weight: @font-weight-medium;
+ color: @text-primary;
+}
+
+.accent-custom {
+ padding-top: @space-3;
+ border-top: 1px solid @border-subtle;
+}
+
+.accent-custom-label {
+ display: block;
+ font-size: @font-size-sm;
+ font-weight: @font-weight-medium;
+ color: @text-secondary;
+ margin-bottom: @space-2;
+}
+
+.accent-custom-row {
+ display: flex;
+ align-items: center;
+ gap: @space-3;
+}
+
+.accent-custom-input {
+ width: 40px;
+ height: 32px;
+ padding: 0;
+ border: 1px solid @border-subtle;
+ border-radius: @radius-md;
+ background: transparent;
+ cursor: pointer;
+
+ &::-webkit-color-swatch-wrapper {
+ padding: 2px;
+ }
+
+ &::-webkit-color-swatch {
+ border: none;
+ border-radius: @radius-sm;
+ }
+}
+
+.accent-custom-hex {
+ font-family: @font-mono;
+ font-size: @font-size-sm;
+ color: @text-secondary;
+}
+
+// ============================================
+// DASHBOARD WIDGET CONFIG
+// ============================================
+
+.widget-list {
+ display: flex;
+ flex-direction: column;
+ gap: @space-2;
+}
+
+.widget-item {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ padding: @space-3 @space-4;
+ background: @bg-hover;
+ border-radius: @radius-md;
+ transition: opacity @transition-fast;
+
+ &--hidden {
+ opacity: 0.5;
+ }
+}
+
+.widget-item__info {
+ display: flex;
+ align-items: center;
+ gap: @space-3;
+}
+
+.widget-item__label {
+ font-size: @font-size-sm;
+ font-weight: @font-weight-medium;
+ color: @text-primary;
+}
+
+.widget-item__controls {
+ display: flex;
+ gap: @space-1;
+}
+
+.widget-move-btn {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ width: 28px;
+ height: 28px;
+ background: transparent;
+ border: 1px solid @border-subtle;
+ border-radius: @radius-sm;
+ color: @text-secondary;
+ cursor: pointer;
+ transition: all @transition-fast;
+
+ &:hover:not(:disabled) {
+ background: @bg-card;
+ color: @text-primary;
+ border-color: @border-active;
+ }
+
+ &:disabled {
+ opacity: 0.3;
+ cursor: not-allowed;
+ }
+}
diff --git a/frontend/src/styles/pages/_setup-wizard.less b/frontend/src/styles/pages/_setup-wizard.less
index a8e10a9..b5a8b14 100644
--- a/frontend/src/styles/pages/_setup-wizard.less
+++ b/frontend/src/styles/pages/_setup-wizard.less
@@ -73,7 +73,7 @@
&.completed {
border-color: @accent-primary;
- background: fade(@accent-primary, 15%);
+ background: fade(@accent-primary-raw, 15%);
color: @accent-primary;
}
}
@@ -151,7 +151,7 @@
&.selected {
border-color: @accent-primary;
- background: fade(@accent-primary, 8%);
+ background: fade(@accent-primary-raw, 8%);
}
.option-card-icon {
@@ -218,7 +218,7 @@
&.detected {
border-color: @accent-primary;
- background: fade(@accent-primary, 8%);
+ background: fade(@accent-primary-raw, 8%);
}
.tier-card-header {
@@ -360,7 +360,7 @@
display: inline-flex;
align-items: center;
padding: @space-1 @space-3;
- background: fade(@accent-primary, 12%);
+ background: fade(@accent-primary-raw, 12%);
color: @accent-primary;
border-radius: @radius-full;
font-size: @font-size-sm;
@@ -432,7 +432,7 @@
display: flex;
gap: @space-4;
padding: @space-4;
- background: fade(@accent-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
border-radius: @radius-md;
margin-bottom: @space-6;
diff --git a/frontend/src/styles/pages/_wordpress-pipeline.less b/frontend/src/styles/pages/_wordpress-pipeline.less
index b069f11..1e0bbe8 100644
--- a/frontend/src/styles/pages/_wordpress-pipeline.less
+++ b/frontend/src/styles/pages/_wordpress-pipeline.less
@@ -392,7 +392,7 @@
&:hover:not(:disabled) {
border-color: @color-primary;
color: @color-primary;
- background: fade(@color-primary, 5%);
+ background: fade(@accent-primary-raw, 5%);
}
&:disabled {
@@ -456,7 +456,7 @@
min-width: 20px;
height: 20px;
padding: 0 6px;
- background: fade(@color-primary, 12%);
+ background: fade(@accent-primary-raw, 12%);
color: @color-primary;
border-radius: 10px;
font-size: 11px;
@@ -596,7 +596,7 @@
}
.wp-env-badge.env-multidev {
- background: fade(@color-primary, 12%);
+ background: fade(@accent-primary-raw, 12%);
color: @color-primary;
}
@@ -609,8 +609,8 @@
align-items: center;
gap: @space-2;
padding: @space-3;
- background: fade(@color-primary, 8%);
- border: 1px solid fade(@color-primary, 25%);
+ background: fade(@accent-primary-raw, 8%);
+ border: 1px solid fade(@accent-primary-raw, 25%);
border-radius: @radius-md;
svg {
@@ -734,7 +734,7 @@
color: @text-tertiary;
&.env {
- background: fade(@color-primary, 10%);
+ background: fade(@accent-primary-raw, 10%);
color: @color-primary;
}
}
@@ -881,7 +881,7 @@
&:has(input:checked) {
border-color: @color-primary;
- background: fade(@color-primary, 5%);
+ background: fade(@accent-primary-raw, 5%);
}
input[type="radio"] {
@@ -978,7 +978,7 @@
color: @color-info;
}
&.primary {
- background: fade(@color-primary, 12%);
+ background: fade(@accent-primary-raw, 12%);
color: @color-primary;
}
&.default {
@@ -1388,7 +1388,7 @@
}
&.default {
- border-color: fade(@color-primary, 30%);
+ border-color: fade(@accent-primary-raw, 30%);
border-left: 3px solid @color-primary;
}
}
@@ -1425,7 +1425,7 @@
}
&.default {
- background: fade(@color-primary, 12%);
+ background: fade(@accent-primary-raw, 12%);
color: @color-primary;
}
}
@@ -2201,8 +2201,8 @@
align-items: center;
gap: @space-3;
padding: @space-3 @space-4;
- background: fade(@color-primary, 8%);
- border: 1px solid fade(@color-primary, 20%);
+ background: fade(@accent-primary-raw, 8%);
+ border: 1px solid fade(@accent-primary-raw, 20%);
border-radius: @radius-md;
margin-bottom: @space-4;
}
diff --git a/frontend/src/styles/pages/_wordpress.less b/frontend/src/styles/pages/_wordpress.less
index 9773761..c062bd7 100644
--- a/frontend/src/styles/pages/_wordpress.less
+++ b/frontend/src/styles/pages/_wordpress.less
@@ -1273,7 +1273,7 @@
&:focus {
outline: none;
border-color: @color-primary;
- box-shadow: 0 0 0 3px fade(@color-primary, 15%);
+ box-shadow: 0 0 0 3px fade(@accent-primary-raw, 15%);
}
&::placeholder {
diff --git a/frontend/src/styles/pages/_workflow.less b/frontend/src/styles/pages/_workflow.less
index 064d36a..a2892b4 100644
--- a/frontend/src/styles/pages/_workflow.less
+++ b/frontend/src/styles/pages/_workflow.less
@@ -329,9 +329,9 @@
}
&.palette-item-service:hover {
- background: fade(@accent-primary, 15%);
+ background: fade(@accent-primary-raw, 15%);
color: @accent-primary;
- border-color: fade(@accent-primary, 30%);
+ border-color: fade(@accent-primary-raw, 30%);
}
}
@@ -447,8 +447,8 @@
display: inline-flex;
align-items: center;
padding: 2px @space-2;
- background: fade(@accent-primary, 15%);
- border: 1px solid fade(@accent-primary, 30%);
+ background: fade(@accent-primary-raw, 15%);
+ border: 1px solid fade(@accent-primary-raw, 30%);
border-radius: @radius-full;
font-size: @font-size-xs;
font-family: @font-mono;
@@ -990,8 +990,8 @@
color: white;
&:hover {
- background: darken(@accent-primary, 5%);
- border-color: darken(@accent-primary, 5%);
+ background: darken(@accent-primary-raw, 5%);
+ border-color: darken(@accent-primary-raw, 5%);
color: white;
}
}
@@ -1191,7 +1191,7 @@
&:hover {
border-color: @accent-primary;
- background: fade(@accent-primary, 5%);
+ background: fade(@accent-primary-raw, 5%);
}
}
@@ -1369,7 +1369,7 @@
transition: background @transition-fast;
&:hover {
- background: darken(@accent-primary, 5%);
+ background: darken(@accent-primary-raw, 5%);
}
}
}
@@ -1587,7 +1587,7 @@
border-color: @accent-primary;
.workflow-node-header {
- background: fade(@accent-primary, 15%);
+ background: fade(@accent-primary-raw, 15%);
}
}
From 08d5c4badefd832327be2c5f3c5351aa10b8d6cd Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Wed, 4 Mar 2026 01:37:22 -0500
Subject: [PATCH 09/18] Add SSO/OAuth and DB migration framework
Add full SSO (Google, GitHub, generic OIDC, SAML) support and integrate Alembic-based DB migrations. Introduces OAuthIdentity model, sso_service, SSO API blueprint (authorize/callback/link/unlink/admin/config/test), and settings to enable/configure providers and enforce SSO-only logins. Integrates Flask-Migrate and a MigrationService with API endpoints for status/backup/apply/history and includes Alembic scaffolding (migrations/versions/001_baseline.py). Update app init to register SSO and migrations blueprints and initialize Migrate; replace fragile auto-column hack with MigrationService.prepare/check. Update User model (nullable password_hash, auth_provider, has_password/to_dict fields), add AuditLog SSO actions, add frontend SSO components/pages and a migration UI, and update ROADMAP and requirements. Notes: token encryption uses Fernet derived from SECRET_KEY; migrations may add the new auth_provider column at startup.
---
ROADMAP.md | 74 ++-
backend/app/__init__.py | 85 +--
backend/app/api/auth.py | 24 +-
backend/app/api/migrations.py | 54 ++
backend/app/api/sso.py | 328 ++++++++++
backend/app/models/__init__.py | 4 +-
backend/app/models/audit_log.py | 5 +
backend/app/models/oauth_identity.py | 38 ++
backend/app/models/user.py | 11 +-
backend/app/services/migration_service.py | 251 ++++++++
backend/app/services/settings_service.py | 23 +-
backend/app/services/sso_service.py | 503 +++++++++++++++
backend/cli.py | 150 +++--
backend/migrations/alembic.ini | 9 +
backend/migrations/env.py | 100 +++
backend/migrations/script.py.mako | 24 +
backend/migrations/versions/001_baseline.py | 601 ++++++++++++++++++
backend/requirements.txt | 7 +-
frontend/src/App.jsx | 25 +-
frontend/src/components/SSOProviderIcon.jsx | 27 +
frontend/src/components/ServerKitLogo.jsx | 27 +
frontend/src/components/Sidebar.jsx | 83 ++-
.../settings/MigrationHistoryTab.jsx | 103 +++
.../src/components/settings/SSOConfigTab.jsx | 316 +++++++++
frontend/src/contexts/AuthContext.jsx | 21 +-
frontend/src/contexts/ThemeContext.jsx | 27 +
frontend/src/pages/DatabaseMigration.jsx | 380 +++++++++++
frontend/src/pages/Login.jsx | 112 +++-
frontend/src/pages/Register.jsx | 4 +-
frontend/src/pages/SSOCallback.jsx | 80 +++
frontend/src/pages/Settings.jsx | 303 ++++++++-
frontend/src/pages/Setup.jsx | 10 +-
frontend/src/services/api.js | 76 +++
frontend/src/styles/layout/_sidebar.less | 52 ++
frontend/src/styles/main.less | 1 +
frontend/src/styles/pages/_auth.less | 70 ++
frontend/src/styles/pages/_downloads.less | 2 +-
.../src/styles/pages/_migration-wizard.less | 315 +++++++++
frontend/src/styles/pages/_settings.less | 364 ++++++++++-
39 files changed, 4450 insertions(+), 239 deletions(-)
create mode 100644 backend/app/api/migrations.py
create mode 100644 backend/app/api/sso.py
create mode 100644 backend/app/models/oauth_identity.py
create mode 100644 backend/app/services/migration_service.py
create mode 100644 backend/app/services/sso_service.py
create mode 100644 backend/migrations/alembic.ini
create mode 100644 backend/migrations/env.py
create mode 100644 backend/migrations/script.py.mako
create mode 100644 backend/migrations/versions/001_baseline.py
create mode 100644 frontend/src/components/SSOProviderIcon.jsx
create mode 100644 frontend/src/components/ServerKitLogo.jsx
create mode 100644 frontend/src/components/settings/MigrationHistoryTab.jsx
create mode 100644 frontend/src/components/settings/SSOConfigTab.jsx
create mode 100644 frontend/src/pages/DatabaseMigration.jsx
create mode 100644 frontend/src/pages/SSOCallback.jsx
create mode 100644 frontend/src/styles/pages/_migration-wizard.less
diff --git a/ROADMAP.md b/ROADMAP.md
index d71e485..b427146 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -166,17 +166,17 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
-## Phase 13: Email Server Management (Planned)
+## Phase 13: Email Server Management (Completed)
**Priority: Medium**
-- [ ] Postfix mail server setup
-- [ ] Dovecot IMAP/POP3 configuration
-- [ ] Email account management
-- [ ] Spam filtering (SpamAssassin)
-- [ ] DKIM/SPF/DMARC configuration
-- [ ] Webmail interface integration
-- [ ] Email forwarding rules
+- [x] Postfix mail server setup
+- [x] Dovecot IMAP/POP3 configuration
+- [x] Email account management
+- [x] Spam filtering (SpamAssassin)
+- [x] DKIM/SPF/DMARC configuration
+- [x] Webmail interface integration
+- [x] Email forwarding rules
---
@@ -267,20 +267,54 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
-## Phase 21: SSO & OAuth Login (Planned)
+## Phase 21: SSO & OAuth Login (Completed)
**Priority: High**
-- [ ] Google OAuth 2.0 login
-- [ ] GitHub OAuth login
-- [ ] Generic OpenID Connect (OIDC) provider support
-- [ ] SAML 2.0 support for enterprise environments
-- [ ] Social login UI (provider buttons on login page)
-- [ ] Account linking (connect OAuth identity to existing local account)
-- [ ] Auto-provisioning of new users on first SSO login
-- [ ] Configurable SSO settings (enable/disable providers, client ID/secret management)
-- [ ] Enforce SSO-only login (disable password auth for team members)
-- [ ] SSO session management and token refresh
+- [x] Google OAuth 2.0 login
+- [x] GitHub OAuth login
+- [x] Generic OpenID Connect (OIDC) provider support
+- [x] SAML 2.0 support for enterprise environments
+- [x] Social login UI (provider buttons on login page)
+- [x] Account linking (connect OAuth identity to existing local account)
+- [x] Auto-provisioning of new users on first SSO login
+- [x] Configurable SSO settings (enable/disable providers, client ID/secret management)
+- [x] Enforce SSO-only login (disable password auth for team members)
+- [x] SSO session management and token refresh
+
+---
+
+## Phase 22: Database Migrations & Schema Versioning (Planned)
+
+**Priority: High**
+
+Matomo-style update wizard — when the user logs in after an update and there are pending migrations, a popup/wizard guides them through the process visually.
+
+### Backend — Migration Engine
+- [ ] Integrate Flask-Migrate (Alembic) for versioned schema migrations
+- [ ] Generate initial migration from current model state as baseline
+- [ ] Replace `_auto_migrate_columns()` hack with proper Alembic migrations
+- [ ] Store schema version in a `schema_version` table (current version, history)
+- [ ] API endpoint to check migration status (`GET /api/v1/system/migrations`)
+- [ ] API endpoint to run pending migrations (`POST /api/v1/system/migrations/apply`)
+- [ ] API endpoint to rollback last migration (`POST /api/v1/system/migrations/rollback`)
+- [ ] Auto-detect pending migrations on login and flag the session
+- [ ] Pre-migration automatic DB backup before applying changes
+- [ ] Migration scripts for all existing model changes (retroactive baseline)
+
+### Frontend — Update Wizard UI
+- [ ] Full-screen modal/wizard that appears when pending migrations are detected
+- [ ] Step 1: "Update Available" — show current version vs new version, changelog summary
+- [ ] Step 2: "Backup" — auto-backup the database, show progress, confirm success
+- [ ] Step 3: "Apply Migrations" — run migrations with real-time progress/log output
+- [ ] Step 4: "Done" — success confirmation with summary of changes applied
+- [ ] Error handling: if a migration fails, show the error and offer rollback option
+- [ ] Block access to the panel until migrations are applied (like Matomo does)
+- [ ] Migration history page in Settings showing all past migrations and timestamps
+
+### CLI Fallback
+- [ ] CLI commands for headless/SSH scenarios (`flask db upgrade`, `flask db status`)
+- [ ] CLI rollback support (`flask db downgrade`)
---
@@ -289,7 +323,7 @@ This document outlines the development roadmap for ServerKit. Features are organ
| Version | Target Features | Status |
|---------|-----------------|--------|
| v0.9.0 | Core features, 2FA, Notifications, Security | Current |
-| v1.0.0 | Production-ready stable release | Planned |
+| v1.0.0 | Production-ready stable release, DB migrations | Planned |
| v1.1.0 | Multi-server, Git deployment | Planned |
| v1.2.0 | Backups, Advanced SSL, Advanced Security | Planned |
| v1.3.0 | Email server, API enhancements | Planned |
diff --git a/backend/app/__init__.py b/backend/app/__init__.py
index 444c311..329f138 100644
--- a/backend/app/__init__.py
+++ b/backend/app/__init__.py
@@ -5,11 +5,13 @@
from flask_cors import CORS
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
+from flask_migrate import Migrate
from config import config
db = SQLAlchemy()
jwt = JWTManager()
+migrate = Migrate()
limiter = Limiter(key_func=get_remote_address, default_limits=["100 per minute"])
socketio = None
@@ -33,6 +35,7 @@ def create_app(config_name=None):
# Initialize extensions
db.init_app(app)
+ migrate.init_app(app, db)
jwt.init_app(app)
limiter.init_app(app)
CORS(
@@ -167,6 +170,14 @@ def create_app(config_name=None):
from app.api.two_factor import two_factor_bp
app.register_blueprint(two_factor_bp, url_prefix='/api/v1/auth/2fa')
+ # Register blueprints - SSO / OAuth
+ from app.api.sso import sso_bp
+ app.register_blueprint(sso_bp, url_prefix='/api/v1/sso')
+
+ # Register blueprints - Database Migrations
+ from app.api.migrations import migrations_bp
+ app.register_blueprint(migrations_bp, url_prefix='/api/v1/migrations')
+
# Register blueprints - Admin (User Management, Settings, Audit Logs)
from app.api.admin import admin_bp
app.register_blueprint(admin_bp, url_prefix='/api/v1/admin')
@@ -183,12 +194,10 @@ def create_app(config_name=None):
from app.api.servers import servers_bp
app.register_blueprint(servers_bp, url_prefix='/api/v1/servers')
- # Create database tables
+ # Handle database migrations (Alembic)
with app.app_context():
- db.create_all()
-
- # Auto-migrate missing columns on existing tables
- _auto_migrate_columns(app)
+ from app.services.migration_service import MigrationService
+ MigrationService.check_and_prepare(app)
# Initialize default settings and migrate legacy roles
from app.services.settings_service import SettingsService
@@ -231,72 +240,6 @@ def get_socketio():
return socketio
-def _auto_migrate_columns(app):
- """Add missing columns to existing tables (lightweight auto-migration)."""
- import logging
- from sqlalchemy import text, inspect as sa_inspect
-
- logger = logging.getLogger(__name__)
-
- # Define expected columns per table: (table, column, sql_type)
- expected_columns = [
- # wordpress_sites table
- ('wordpress_sites', 'environment_type', "VARCHAR(20) DEFAULT 'standalone'"),
- ('wordpress_sites', 'multidev_branch', 'VARCHAR(200)'),
- ('wordpress_sites', 'is_locked', 'BOOLEAN DEFAULT 0'),
- ('wordpress_sites', 'locked_by', 'VARCHAR(100)'),
- ('wordpress_sites', 'locked_reason', 'VARCHAR(200)'),
- ('wordpress_sites', 'lock_expires_at', 'DATETIME'),
- ('wordpress_sites', 'compose_project_name', 'VARCHAR(100)'),
- ('wordpress_sites', 'container_prefix', 'VARCHAR(100)'),
- ('wordpress_sites', 'resource_limits', 'TEXT'),
- ('wordpress_sites', 'basic_auth_enabled', 'BOOLEAN DEFAULT 0'),
- ('wordpress_sites', 'basic_auth_user', 'VARCHAR(100)'),
- ('wordpress_sites', 'basic_auth_password_hash', 'VARCHAR(200)'),
- ('wordpress_sites', 'health_status', "VARCHAR(20) DEFAULT 'unknown'"),
- ('wordpress_sites', 'last_health_check', 'DATETIME'),
- ('wordpress_sites', 'disk_usage_bytes', 'BIGINT DEFAULT 0'),
- ('wordpress_sites', 'disk_usage_updated_at', 'DATETIME'),
- ('wordpress_sites', 'auto_sync_schedule', 'VARCHAR(100)'),
- ('wordpress_sites', 'auto_sync_enabled', 'BOOLEAN DEFAULT 0'),
- # applications table
- ('applications', 'private_slug', 'VARCHAR(50)'),
- ('applications', 'private_url_enabled', 'BOOLEAN DEFAULT 0'),
- ('applications', 'environment_type', "VARCHAR(20) DEFAULT 'standalone'"),
- ('applications', 'linked_app_id', 'INTEGER'),
- ('applications', 'shared_config', 'TEXT'),
- ]
-
- try:
- inspector = sa_inspect(db.engine)
- existing_tables = inspector.get_table_names()
-
- # Group by table for efficient inspection
- tables_checked = {}
- applied = 0
-
- for table, column, col_type in expected_columns:
- if table not in existing_tables:
- continue
-
- if table not in tables_checked:
- tables_checked[table] = [col['name'] for col in inspector.get_columns(table)]
-
- if column not in tables_checked[table]:
- try:
- db.session.execute(text(f'ALTER TABLE {table} ADD COLUMN {column} {col_type}'))
- applied += 1
- logger.info(f'Auto-migrated: added {table}.{column}')
- except Exception as e:
- logger.warning(f'Auto-migrate failed for {table}.{column}: {e}')
-
- if applied > 0:
- db.session.commit()
- logger.info(f'Auto-migration: applied {applied} column(s)')
- except Exception as e:
- logger.warning(f'Auto-migration check failed: {e}')
-
-
_auto_sync_thread = None
diff --git a/backend/app/api/auth.py b/backend/app/api/auth.py
index 707c6b4..8c8e0ef 100644
--- a/backend/app/api/auth.py
+++ b/backend/app/api/auth.py
@@ -21,9 +21,26 @@ def get_setup_status():
needs_setup = SettingsService.needs_setup()
registration_enabled = SettingsService.is_registration_enabled()
+ # SSO info for login page
+ from app.services import sso_service
+ sso_providers = sso_service.get_enabled_providers()
+ password_login_enabled = sso_service.is_password_login_allowed()
+
+ # Migration status
+ from app.services.migration_service import MigrationService
+ migration_status = MigrationService.get_status()
+
return jsonify({
'needs_setup': needs_setup,
- 'registration_enabled': registration_enabled
+ 'registration_enabled': registration_enabled,
+ 'sso_providers': sso_providers,
+ 'password_login_enabled': password_login_enabled,
+ 'needs_migration': migration_status['needs_migration'],
+ 'migration_info': {
+ 'pending_count': migration_status['pending_count'],
+ 'current_revision': migration_status['current_revision'],
+ 'head_revision': migration_status['head_revision'],
+ },
}), 200
@@ -123,6 +140,11 @@ def complete_onboarding():
@auth_bp.route('/login', methods=['POST'])
@limiter.limit("5 per minute")
def login():
+ # Check if password login is disabled (SSO-only mode)
+ from app.services import sso_service
+ if not sso_service.is_password_login_allowed():
+ return jsonify({'error': 'Password login is disabled. Please use SSO.'}), 403
+
data = request.get_json()
if not data:
diff --git a/backend/app/api/migrations.py b/backend/app/api/migrations.py
new file mode 100644
index 0000000..a5aea12
--- /dev/null
+++ b/backend/app/api/migrations.py
@@ -0,0 +1,54 @@
+from flask import Blueprint, jsonify, current_app
+from flask_jwt_extended import jwt_required, get_jwt_identity
+
+from app.models import User
+from app.services.migration_service import MigrationService
+
+migrations_bp = Blueprint('migrations', __name__)
+
+
+@migrations_bp.route('/status', methods=['GET'])
+def get_migration_status():
+ """Check if migrations are pending. No auth required (called before login)."""
+ status = MigrationService.get_status()
+ return jsonify(status), 200
+
+
+@migrations_bp.route('/backup', methods=['POST'])
+@jwt_required()
+def create_backup():
+ """Create a database backup before applying migrations. Admin only."""
+ user = User.query.get(get_jwt_identity())
+ if not user or user.role != User.ROLE_ADMIN:
+ return jsonify({'error': 'Admin access required'}), 403
+
+ result = MigrationService.create_backup(current_app)
+ if result['success']:
+ return jsonify(result), 200
+ return jsonify(result), 500
+
+
+@migrations_bp.route('/apply', methods=['POST'])
+@jwt_required()
+def apply_migrations():
+ """Apply all pending migrations. Admin only."""
+ user = User.query.get(get_jwt_identity())
+ if not user or user.role != User.ROLE_ADMIN:
+ return jsonify({'error': 'Admin access required'}), 403
+
+ result = MigrationService.apply_migrations(current_app)
+ if result['success']:
+ return jsonify(result), 200
+ return jsonify(result), 500
+
+
+@migrations_bp.route('/history', methods=['GET'])
+@jwt_required()
+def get_migration_history():
+ """Return all migration revisions. Admin only."""
+ user = User.query.get(get_jwt_identity())
+ if not user or user.role != User.ROLE_ADMIN:
+ return jsonify({'error': 'Admin access required'}), 403
+
+ history = MigrationService.get_migration_history(current_app)
+ return jsonify({'revisions': history}), 200
diff --git a/backend/app/api/sso.py b/backend/app/api/sso.py
new file mode 100644
index 0000000..2f58448
--- /dev/null
+++ b/backend/app/api/sso.py
@@ -0,0 +1,328 @@
+"""SSO / OAuth API blueprint."""
+from datetime import datetime
+from flask import Blueprint, request, jsonify, session
+from flask_jwt_extended import (
+ create_access_token, create_refresh_token, jwt_required, get_jwt_identity
+)
+from app import db
+from app.models import User, AuditLog
+from app.models.oauth_identity import OAuthIdentity
+from app.services import sso_service
+from app.services.settings_service import SettingsService
+from app.services.audit_service import AuditService
+
+sso_bp = Blueprint('sso', __name__)
+
+VALID_PROVIDERS = ('google', 'github', 'oidc', 'saml')
+
+
+# ------------------------------------------------------------------
+# Public endpoints (login flow)
+# ------------------------------------------------------------------
+
+@sso_bp.route('/providers', methods=['GET'])
+def list_providers():
+ """List enabled SSO providers + whether password login is available."""
+ return jsonify({
+ 'providers': sso_service.get_enabled_providers(),
+ 'password_login_enabled': sso_service.is_password_login_allowed(),
+ }), 200
+
+
+@sso_bp.route('/authorize/<provider>', methods=['GET'])
+def authorize(provider):
+ """Generate OAuth authorize URL (state + PKCE)."""
+ if provider not in VALID_PROVIDERS or provider == 'saml':
+ return jsonify({'error': f'Invalid OAuth provider: {provider}'}), 400
+
+ redirect_uri = request.args.get('redirect_uri', '')
+ if not redirect_uri:
+ return jsonify({'error': 'redirect_uri is required'}), 400
+
+ try:
+ auth_url, state = sso_service.generate_auth_url(provider, redirect_uri)
+ return jsonify({'auth_url': auth_url, 'state': state}), 200
+ except Exception as e:
+ return jsonify({'error': str(e)}), 400
+
+
+@sso_bp.route('/callback/<provider>', methods=['POST'])
+def callback(provider):
+ """Exchange authorization code for tokens, find/create user, return JWT."""
+ if provider not in VALID_PROVIDERS or provider == 'saml':
+ return jsonify({'error': f'Invalid OAuth provider: {provider}'}), 400
+
+ data = request.get_json() or {}
+ code = data.get('code', '')
+ state = data.get('state', '')
+ redirect_uri = data.get('redirect_uri', '')
+
+ if not code or not state:
+ return jsonify({'error': 'code and state are required'}), 400
+
+ try:
+ profile = sso_service.handle_oauth_callback(provider, code, state, redirect_uri)
+ user, is_new = sso_service.find_or_create_user(provider, profile)
+ except ValueError as e:
+ AuditLog.log(
+ action=AuditLog.ACTION_SSO_LOGIN_FAILED,
+ details={'provider': provider, 'error': str(e)},
+ ip_address=request.remote_addr,
+ )
+ db.session.commit()
+ return jsonify({'error': str(e)}), 403
+ except Exception as e:
+ AuditLog.log(
+ action=AuditLog.ACTION_SSO_LOGIN_FAILED,
+ details={'provider': provider, 'error': str(e)},
+ ip_address=request.remote_addr,
+ )
+ db.session.commit()
+ return jsonify({'error': 'SSO authentication failed'}), 500
+
+ return _complete_sso_login(user, provider, is_new)
+
+
+@sso_bp.route('/saml/callback', methods=['POST'])
+def saml_callback():
+ """SAML ACS endpoint (form POST from IdP)."""
+ try:
+ request_data = {
+ 'https': request.scheme == 'https',
+ 'http_host': request.host,
+ 'script_name': request.path,
+ 'acs_url': request.url,
+ 'sp_entity_id': request.host_url.rstrip('/'),
+ }
+ profile = sso_service.handle_saml_callback(request.form, request_data)
+ user, is_new = sso_service.find_or_create_user('saml', profile)
+ except ValueError as e:
+ return jsonify({'error': str(e)}), 403
+ except Exception as e:
+ return jsonify({'error': 'SAML authentication failed'}), 500
+
+ return _complete_sso_login(user, 'saml', is_new)
+
+
+@sso_bp.route('/saml/metadata', methods=['GET'])
+def saml_metadata():
+ """Return SP metadata XML."""
+ try:
+ from onelogin.saml2.auth import OneLogin_Saml2_Auth
+ cfg = sso_service.get_provider_config('saml')
+ request_data = {
+ 'https': request.scheme == 'https',
+ 'http_host': request.host,
+ 'script_name': request.path,
+ 'acs_url': request.url_root.rstrip('/') + '/api/v1/sso/saml/callback',
+ 'sp_entity_id': request.host_url.rstrip('/'),
+ }
+ settings = sso_service.get_saml_settings(cfg, request_data)
+ saml_req = {
+ 'https': 'on' if request_data.get('https') else 'off',
+ 'http_host': request_data.get('http_host', ''),
+ 'script_name': request_data.get('script_name', ''),
+ }
+ auth = OneLogin_Saml2_Auth(saml_req, settings)
+ metadata = auth.get_settings().get_sp_metadata()
+ from flask import Response
+ return Response(metadata, mimetype='application/xml')
+ except Exception as e:
+ return jsonify({'error': str(e)}), 500
+
+
+# ------------------------------------------------------------------
+# Authenticated endpoints (account linking)
+# ------------------------------------------------------------------
+
+@sso_bp.route('/identities', methods=['GET'])
+@jwt_required()
+def list_identities():
+ """Current user's linked OAuth identities."""
+ user_id = get_jwt_identity()
+ identities = OAuthIdentity.query.filter_by(user_id=user_id).all()
+ return jsonify({'identities': [i.to_dict() for i in identities]}), 200
+
+
+@sso_bp.route('/link/<provider>', methods=['POST'])
+@jwt_required()
+def link_provider(provider):
+ """Link an OAuth identity to the current user."""
+ if provider not in VALID_PROVIDERS:
+ return jsonify({'error': f'Invalid provider: {provider}'}), 400
+
+ user_id = get_jwt_identity()
+ data = request.get_json() or {}
+ code = data.get('code', '')
+ state = data.get('state', '')
+ redirect_uri = data.get('redirect_uri', '')
+
+ if not code or not state:
+ return jsonify({'error': 'code and state are required'}), 400
+
+ try:
+ profile = sso_service.handle_oauth_callback(provider, code, state, redirect_uri)
+ except Exception as e:
+ return jsonify({'error': str(e)}), 400
+
+ # Check if this identity is already linked to another user
+ existing = OAuthIdentity.query.filter_by(
+ provider=provider,
+ provider_user_id=profile['provider_user_id'],
+ ).first()
+ if existing:
+ if existing.user_id == user_id:
+ return jsonify({'error': 'This identity is already linked to your account'}), 409
+ return jsonify({'error': 'This identity is already linked to another account'}), 409
+
+ identity = sso_service.link_identity(user_id, provider, profile, profile.get('_tokens', {}))
+ return jsonify({'identity': identity.to_dict()}), 201
+
+
+@sso_bp.route('/link/<provider>', methods=['DELETE'])
+@jwt_required()
+def unlink_provider(provider):
+ """Unlink an OAuth identity."""
+ if provider not in VALID_PROVIDERS:
+ return jsonify({'error': f'Invalid provider: {provider}'}), 400
+
+ user_id = get_jwt_identity()
+ try:
+ sso_service.unlink_identity(user_id, provider)
+ return jsonify({'message': f'{provider} identity unlinked'}), 200
+ except ValueError as e:
+ return jsonify({'error': str(e)}), 400
+
+
+# ------------------------------------------------------------------
+# Admin endpoints (SSO configuration)
+# ------------------------------------------------------------------
+
+@sso_bp.route('/admin/config', methods=['GET'])
+@jwt_required()
+def get_sso_config():
+ """All SSO settings (secrets redacted)."""
+ user = User.query.get(get_jwt_identity())
+ if not user or not user.is_admin:
+ return jsonify({'error': 'Admin access required'}), 403
+
+ config = {}
+ for key in SettingsService.DEFAULT_SETTINGS:
+ if not key.startswith('sso_'):
+ continue
+ val = SettingsService.get(key, SettingsService.DEFAULT_SETTINGS[key]['value'])
+ # Redact secrets
+ if 'secret' in key or 'cert' in key:
+ if val and isinstance(val, str) and len(val) > 4:
+ val = '****' + val[-4:]
+ config[key] = val
+ return jsonify({'config': config}), 200
+
+
+@sso_bp.route('/admin/config/<provider>', methods=['PUT'])
+@jwt_required()
+def update_provider_config(provider):
+ """Update a provider's SSO config."""
+ user = User.query.get(get_jwt_identity())
+ if not user or not user.is_admin:
+ return jsonify({'error': 'Admin access required'}), 403
+
+ if provider not in VALID_PROVIDERS:
+ return jsonify({'error': f'Invalid provider: {provider}'}), 400
+
+ data = request.get_json() or {}
+ prefix = f'sso_{provider}_'
+ updated = []
+
+ for key, value in data.items():
+ full_key = f'{prefix}{key}'
+ if full_key not in SettingsService.DEFAULT_SETTINGS:
+ continue
+ # Skip unchanged redacted secrets
+ if ('secret' in key or 'cert' in key) and isinstance(value, str) and value.startswith('****'):
+ continue
+ SettingsService.set(full_key, value, user_id=user.id)
+ updated.append(key)
+
+ AuditLog.log(
+ action=AuditLog.ACTION_SETTINGS_UPDATE,
+ user_id=user.id,
+ details={'sso_provider': provider, 'updated_fields': updated},
+ )
+ db.session.commit()
+ return jsonify({'message': f'{provider} SSO config updated', 'updated': updated}), 200
+
+
+@sso_bp.route('/admin/test/<provider>', methods=['POST'])
+@jwt_required()
+def test_provider(provider):
+ """Test provider connectivity."""
+ user = User.query.get(get_jwt_identity())
+ if not user or not user.is_admin:
+ return jsonify({'error': 'Admin access required'}), 403
+
+ result = sso_service.test_provider_connectivity(provider)
+ return jsonify(result), 200 if result['ok'] else 400
+
+
+@sso_bp.route('/admin/general', methods=['PUT'])
+@jwt_required()
+def update_general_settings():
+ """Update general SSO settings (auto_provision, force_sso, etc.)."""
+ user = User.query.get(get_jwt_identity())
+ if not user or not user.is_admin:
+ return jsonify({'error': 'Admin access required'}), 403
+
+ data = request.get_json() or {}
+ general_keys = ['sso_auto_provision', 'sso_default_role', 'sso_force_sso', 'sso_allowed_domains']
+ updated = []
+
+ for key in general_keys:
+ if key in data:
+ SettingsService.set(key, data[key], user_id=user.id)
+ updated.append(key)
+
+ AuditLog.log(
+ action=AuditLog.ACTION_SETTINGS_UPDATE,
+ user_id=user.id,
+ details={'sso_general': updated},
+ )
+ db.session.commit()
+ return jsonify({'message': 'SSO general settings updated', 'updated': updated}), 200
+
+
+# ------------------------------------------------------------------
+# Helper
+# ------------------------------------------------------------------
+
+def _complete_sso_login(user, provider, is_new):
+ """Issue JWT or trigger 2FA for an SSO-authenticated user."""
+ # Check 2FA
+ if user.totp_enabled:
+ temp_token = create_access_token(
+ identity=user.id,
+ additional_claims={'2fa_pending': True},
+ expires_delta=False,
+ )
+ return jsonify({
+ 'requires_2fa': True,
+ 'temp_token': temp_token,
+ 'message': 'Two-factor authentication required',
+ }), 200
+
+ user.last_login_at = datetime.utcnow()
+ user.reset_failed_login()
+ db.session.commit()
+
+ AuditService.log_login(user.id, success=True, details={'provider': provider, 'is_new': is_new})
+ db.session.commit()
+
+ access_token = create_access_token(identity=user.id)
+ refresh_token = create_refresh_token(identity=user.id)
+
+ return jsonify({
+ 'user': user.to_dict(),
+ 'access_token': access_token,
+ 'refresh_token': refresh_token,
+ 'is_new_user': is_new,
+ }), 200
diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py
index c3985ad..1458324 100644
--- a/backend/app/models/__init__.py
+++ b/backend/app/models/__init__.py
@@ -16,6 +16,7 @@
from app.models.promotion_job import PromotionJob
from app.models.sanitization_profile import SanitizationProfile
from app.models.email_account import EmailAccount
+from app.models.oauth_identity import OAuthIdentity
__all__ = [
'User', 'Application', 'Domain', 'EnvironmentVariable', 'EnvironmentVariableHistory',
@@ -23,5 +24,6 @@
'MetricsHistory', 'Workflow', 'GitWebhook', 'WebhookLog', 'GitDeployment',
'Server', 'ServerGroup', 'ServerMetrics', 'ServerCommand', 'AgentSession', 'SecurityAlert',
'WordPressSite', 'DatabaseSnapshot', 'SyncJob',
- 'EnvironmentActivity', 'PromotionJob', 'SanitizationProfile', 'EmailAccount'
+ 'EnvironmentActivity', 'PromotionJob', 'SanitizationProfile', 'EmailAccount',
+ 'OAuthIdentity'
]
diff --git a/backend/app/models/audit_log.py b/backend/app/models/audit_log.py
index b6dc0ab..b5b6575 100644
--- a/backend/app/models/audit_log.py
+++ b/backend/app/models/audit_log.py
@@ -39,6 +39,11 @@ class AuditLog(db.Model):
ACTION_DEPLOY = 'app.deploy'
ACTION_BACKUP_CREATE = 'backup.create'
ACTION_BACKUP_RESTORE = 'backup.restore'
+ ACTION_SSO_LOGIN = 'sso.login'
+ ACTION_SSO_LOGIN_FAILED = 'sso.login_failed'
+ ACTION_SSO_PROVISION = 'sso.provision'
+ ACTION_SSO_LINK = 'sso.link'
+ ACTION_SSO_UNLINK = 'sso.unlink'
def get_details(self):
"""Return parsed details JSON."""
diff --git a/backend/app/models/oauth_identity.py b/backend/app/models/oauth_identity.py
new file mode 100644
index 0000000..a27bbb7
--- /dev/null
+++ b/backend/app/models/oauth_identity.py
@@ -0,0 +1,38 @@
+from datetime import datetime
+from app import db
+
+
+class OAuthIdentity(db.Model):
+ """Links an external OAuth/SAML identity to a local user."""
+ __tablename__ = 'oauth_identities'
+ __table_args__ = (
+ db.UniqueConstraint('provider', 'provider_user_id', name='uq_provider_identity'),
+ )
+
+ id = db.Column(db.Integer, primary_key=True)
+ user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='CASCADE'), nullable=False, index=True)
+ provider = db.Column(db.String(50), nullable=False) # google, github, oidc, saml
+ provider_user_id = db.Column(db.String(256), nullable=False)
+ provider_email = db.Column(db.String(256), nullable=True)
+ provider_display_name = db.Column(db.String(256), nullable=True)
+ access_token_encrypted = db.Column(db.Text, nullable=True)
+ refresh_token_encrypted = db.Column(db.Text, nullable=True)
+ token_expires_at = db.Column(db.DateTime, nullable=True)
+ created_at = db.Column(db.DateTime, default=datetime.utcnow)
+ updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+ last_login_at = db.Column(db.DateTime, nullable=True)
+
+ user = db.relationship('User', backref=db.backref('oauth_identities', lazy='dynamic'))
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'provider': self.provider,
+ 'provider_email': self.provider_email,
+ 'provider_display_name': self.provider_display_name,
+ 'created_at': self.created_at.isoformat() if self.created_at else None,
+ 'last_login_at': self.last_login_at.isoformat() if self.last_login_at else None,
+ }
+
+ def __repr__(self):
+        return f'<OAuthIdentity {self.provider}:{self.provider_user_id} user={self.user_id}>'
diff --git a/backend/app/models/user.py b/backend/app/models/user.py
index e5762a9..1d598a9 100644
--- a/backend/app/models/user.py
+++ b/backend/app/models/user.py
@@ -16,7 +16,8 @@ class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(120), unique=True, nullable=False, index=True)
username = db.Column(db.String(80), unique=True, nullable=False, index=True)
- password_hash = db.Column(db.String(256), nullable=False)
+ password_hash = db.Column(db.String(256), nullable=True)
+ auth_provider = db.Column(db.String(50), default='local') # local, google, github, oidc, saml
role = db.Column(db.String(20), default='developer') # 'admin', 'developer', 'viewer'
is_active = db.Column(db.Boolean, default=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow)
@@ -69,6 +70,8 @@ def set_password(self, password):
self.password_hash = generate_password_hash(password)
def check_password(self, password):
+ if not self.password_hash:
+ return False
return check_password_hash(self.password_hash, password)
@property
@@ -90,6 +93,10 @@ def has_role(self, *roles):
"""Check if user has any of the specified roles."""
return self.role in roles
+ @property
+ def has_password(self):
+ return self.password_hash is not None
+
def to_dict(self):
return {
'id': self.id,
@@ -98,6 +105,8 @@ def to_dict(self):
'role': self.role,
'is_active': self.is_active,
'totp_enabled': self.totp_enabled,
+ 'auth_provider': self.auth_provider or 'local',
+ 'has_password': self.has_password,
'created_at': self.created_at.isoformat(),
'updated_at': self.updated_at.isoformat(),
'last_login_at': self.last_login_at.isoformat() if self.last_login_at else None,
diff --git a/backend/app/services/migration_service.py b/backend/app/services/migration_service.py
new file mode 100644
index 0000000..1cbc94a
--- /dev/null
+++ b/backend/app/services/migration_service.py
@@ -0,0 +1,251 @@
+"""Database migration service using Flask-Migrate (Alembic)."""
+
+import os
+import logging
+import shutil
+from datetime import datetime
+
+from alembic import command
+from alembic.config import Config as AlembicConfig
+from alembic.script import ScriptDirectory
+from alembic.migration import MigrationContext
+from sqlalchemy import inspect as sa_inspect, text
+
+logger = logging.getLogger(__name__)
+
+
+class MigrationService:
+ """Handles database migration detection, backup, and execution."""
+
+ _needs_migration = False
+ _current_revision = None
+ _head_revision = None
+ _pending_migrations = []
+
+ @classmethod
+ def _get_alembic_config(cls, app):
+ """Build an Alembic config pointing at the migrations directory."""
+ migrations_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'migrations')
+ cfg = AlembicConfig()
+ cfg.set_main_option('script_location', migrations_dir)
+ cfg.set_main_option('sqlalchemy.url', app.config['SQLALCHEMY_DATABASE_URI'])
+ return cfg
+
+ @classmethod
+ def _fix_missing_columns(cls, db):
+ """Add columns that may be missing from existing tables.
+
+ Runs raw SQL before any ORM queries to prevent crashes when models
+ reference columns that don't exist in the database yet.
+ """
+ inspector = sa_inspect(db.engine)
+ existing_tables = inspector.get_table_names()
+
+ if 'users' in existing_tables:
+ cols = {c['name'] for c in inspector.get_columns('users')}
+ if 'auth_provider' not in cols:
+ logger.info('Adding missing column: users.auth_provider')
+ with db.engine.begin() as conn:
+ conn.execute(text(
+ "ALTER TABLE users ADD COLUMN auth_provider VARCHAR(50) DEFAULT 'local'"
+ ))
+
+ @classmethod
+ def check_and_prepare(cls, app):
+ """Called on startup to detect migration state.
+
+ Three scenarios:
+ 1. Fresh install (no DB / no tables) -> upgrade head to create everything
+ 2. Existing install (tables exist, no alembic_version) -> stamp baseline, then check
+ 3. Normal state (alembic_version exists) -> compare current vs head
+ """
+ from app import db
+
+ try:
+ # Fix any missing columns before ORM queries can fail
+ cls._fix_missing_columns(db)
+
+ cfg = cls._get_alembic_config(app)
+ script = ScriptDirectory.from_config(cfg)
+ head = script.get_current_head()
+ cls._head_revision = head
+
+ inspector = sa_inspect(db.engine)
+ existing_tables = inspector.get_table_names()
+ has_alembic = 'alembic_version' in existing_tables
+
+ # Count real application tables (exclude alembic_version)
+ app_tables = [t for t in existing_tables if t != 'alembic_version']
+
+ if not app_tables and not has_alembic:
+ # Scenario 1: Fresh install — create everything via Alembic
+ logger.info('Fresh install detected — running alembic upgrade head')
+ with app.app_context():
+ command.upgrade(cfg, 'head')
+ cls._needs_migration = False
+ cls._current_revision = head
+ cls._pending_migrations = []
+ return
+
+ if app_tables and not has_alembic:
+ # Scenario 2: Existing install upgrading to Alembic
+ # Run upgrade so migrations can add missing columns to existing tables
+ logger.info('Existing install detected — running alembic upgrade head')
+ with app.app_context():
+ command.upgrade(cfg, 'head')
+
+ # Scenario 3 (or after stamping): Check current vs head
+ with db.engine.connect() as conn:  # NOTE(review): runs outside app.app_context() — may fail under Flask-SQLAlchemy 3.x; confirm
+ context = MigrationContext.configure(conn)
+ current_heads = context.get_current_heads()
+ cls._current_revision = current_heads[0] if current_heads else None
+
+ if cls._current_revision != head:
+ # Calculate pending migrations
+ cls._pending_migrations = []
+ for rev in script.walk_revisions():
+ if cls._current_revision and rev.revision == cls._current_revision:
+ break
+ cls._pending_migrations.append({
+ 'revision': rev.revision,
+ 'description': rev.doc or '',
+ 'down_revision': rev.down_revision,
+ })
+ cls._pending_migrations.reverse()
+ cls._needs_migration = len(cls._pending_migrations) > 0
+ if cls._needs_migration:
+ logger.warning(
+ f'Database migration needed: {len(cls._pending_migrations)} pending '
+ f'(current={cls._current_revision}, head={head})'
+ )
+ else:
+ cls._needs_migration = False
+ cls._pending_migrations = []
+
+ except Exception as e:
+ logger.error(f'Migration check failed: {e}')
+ # Don't block startup on migration check failure — fall back to no-migration state
+ cls._needs_migration = False
+
+ @classmethod
+ def get_status(cls):
+ """Return current migration status."""
+ return {
+ 'needs_migration': cls._needs_migration,
+ 'current_revision': cls._current_revision,
+ 'head_revision': cls._head_revision,
+ 'pending_count': len(cls._pending_migrations),
+ 'pending_migrations': cls._pending_migrations,
+ }
+
+ @classmethod
+ def create_backup(cls, app):
+ """Create a database backup before migration.
+
+ SQLite: file copy. PostgreSQL: pg_dump.
+ """
+ db_url = app.config['SQLALCHEMY_DATABASE_URI']
+ timestamp = datetime.utcnow().strftime('%Y%m%d_%H%M%S')
+
+ try:
+ if db_url.startswith('sqlite'):
+ # Extract file path from sqlite:///path or sqlite:////path
+ db_path = db_url.replace('sqlite:///', '')
+ if not os.path.isabs(db_path):
+ db_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), db_path)
+
+ if not os.path.exists(db_path):
+ return {'success': False, 'error': 'Database file not found'}
+
+ backup_dir = os.path.join(os.path.dirname(db_path), 'backups')
+ os.makedirs(backup_dir, exist_ok=True)
+ backup_name = f'serverkit_pre_migration_{timestamp}.db'
+ backup_path = os.path.join(backup_dir, backup_name)
+
+ shutil.copy2(db_path, backup_path)
+ logger.info(f'Database backup created: {backup_path}')
+ return {'success': True, 'path': backup_path}
+
+ elif 'postgresql' in db_url:
+ import subprocess
+ backup_dir = '/var/serverkit/backups/db'
+ os.makedirs(backup_dir, exist_ok=True)
+ backup_name = f'serverkit_pre_migration_{timestamp}.sql'
+ backup_path = os.path.join(backup_dir, backup_name)
+
+ result = subprocess.run(
+ ['pg_dump', db_url, '-f', backup_path],
+ capture_output=True, text=True, timeout=300
+ )
+ if result.returncode != 0:
+ return {'success': False, 'error': result.stderr}
+
+ logger.info(f'Database backup created: {backup_path}')
+ return {'success': True, 'path': backup_path}
+
+ else:
+ return {'success': False, 'error': 'Unsupported database type'}
+
+ except Exception as e:
+ logger.error(f'Backup failed: {e}')
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def apply_migrations(cls, app):
+ """Run all pending Alembic migrations."""
+ try:
+ cfg = cls._get_alembic_config(app)
+
+ with app.app_context():
+ command.upgrade(cfg, 'head')
+
+ # Update internal state
+ from app import db
+ with db.engine.connect() as conn:
+ context = MigrationContext.configure(conn)
+ current_heads = context.get_current_heads()
+ cls._current_revision = current_heads[0] if current_heads else None
+
+ cls._needs_migration = False
+ cls._pending_migrations = []
+
+ # Record in SystemSettings
+ try:
+ from app.models import SystemSettings
+ from app import db as _db
+ SystemSettings.set('schema_version', cls._current_revision)
+ SystemSettings.set('last_migration_at', datetime.utcnow().isoformat())
+ _db.session.commit()
+ except Exception as e:
+ logger.warning(f'Failed to record migration in settings: {e}')
+
+ logger.info(f'Migrations applied successfully (now at {cls._current_revision})')
+ return {'success': True, 'revision': cls._current_revision}
+
+ except Exception as e:
+ logger.error(f'Migration failed: {e}')
+ return {'success': False, 'error': str(e)}
+
+ @classmethod
+ def get_migration_history(cls, app):
+ """Return list of all Alembic revisions with descriptions."""
+ try:
+ cfg = cls._get_alembic_config(app)
+ script = ScriptDirectory.from_config(cfg)
+
+ revisions = []
+ for rev in script.walk_revisions():
+ revisions.append({
+ 'revision': rev.revision,
+ 'down_revision': rev.down_revision,
+ 'description': rev.doc or '',
+ 'is_current': rev.revision == cls._current_revision,
+ 'is_head': rev.revision == cls._head_revision,
+ })
+
+ revisions.reverse()
+ return revisions
+
+ except Exception as e:
+ logger.error(f'Failed to get migration history: {e}')
+ return []
diff --git a/backend/app/services/settings_service.py b/backend/app/services/settings_service.py
index 7810d78..69db1d7 100644
--- a/backend/app/services/settings_service.py
+++ b/backend/app/services/settings_service.py
@@ -37,7 +37,28 @@ class SettingsService:
'value': False,
'type': 'boolean',
'description': 'Enable developer mode for debugging tools and icon reference'
- }
+ },
+ # SSO / OAuth settings
+ 'sso_google_enabled': {'value': False, 'type': 'boolean', 'description': 'Enable Google OAuth login'},
+ 'sso_google_client_id': {'value': '', 'type': 'string', 'description': 'Google OAuth client ID'},
+ 'sso_google_client_secret': {'value': '', 'type': 'string', 'description': 'Google OAuth client secret'},
+ 'sso_github_enabled': {'value': False, 'type': 'boolean', 'description': 'Enable GitHub OAuth login'},
+ 'sso_github_client_id': {'value': '', 'type': 'string', 'description': 'GitHub OAuth client ID'},
+ 'sso_github_client_secret': {'value': '', 'type': 'string', 'description': 'GitHub OAuth client secret'},
+ 'sso_oidc_enabled': {'value': False, 'type': 'boolean', 'description': 'Enable generic OIDC login'},
+ 'sso_oidc_provider_name': {'value': '', 'type': 'string', 'description': 'OIDC provider display name'},
+ 'sso_oidc_client_id': {'value': '', 'type': 'string', 'description': 'OIDC client ID'},
+ 'sso_oidc_client_secret': {'value': '', 'type': 'string', 'description': 'OIDC client secret'},
+ 'sso_oidc_discovery_url': {'value': '', 'type': 'string', 'description': 'OIDC discovery URL'},
+ 'sso_saml_enabled': {'value': False, 'type': 'boolean', 'description': 'Enable SAML 2.0 login'},
+ 'sso_saml_entity_id': {'value': '', 'type': 'string', 'description': 'SAML SP entity ID'},
+ 'sso_saml_idp_metadata_url': {'value': '', 'type': 'string', 'description': 'SAML IdP metadata URL'},
+ 'sso_saml_idp_sso_url': {'value': '', 'type': 'string', 'description': 'SAML IdP SSO URL'},
+ 'sso_saml_idp_cert': {'value': '', 'type': 'string', 'description': 'SAML IdP certificate (PEM)'},
+ 'sso_auto_provision': {'value': True, 'type': 'boolean', 'description': 'Auto-create users on first SSO login'},
+ 'sso_default_role': {'value': 'developer', 'type': 'string', 'description': 'Default role for SSO-provisioned users'},
+ 'sso_force_sso': {'value': False, 'type': 'boolean', 'description': 'Disable password login (SSO only)'},
+ 'sso_allowed_domains': {'value': [], 'type': 'json', 'description': 'Restrict SSO to these email domains'},
}
@staticmethod
diff --git a/backend/app/services/sso_service.py b/backend/app/services/sso_service.py
new file mode 100644
index 0000000..2022bc7
--- /dev/null
+++ b/backend/app/services/sso_service.py
@@ -0,0 +1,503 @@
+"""SSO / OAuth 2.0 / SAML service — handles external identity authentication."""
+import hashlib
+import logging
+import secrets
+from datetime import datetime
+
+from authlib.integrations.requests_client import OAuth2Session
+from cryptography.fernet import Fernet
+from flask import current_app, session
+import base64
+import requests as http_requests
+
+from app import db
+from app.models import User, AuditLog
+from app.models.oauth_identity import OAuthIdentity
+from app.services.settings_service import SettingsService
+
+logger = logging.getLogger(__name__)
+
+# Built-in provider endpoint configs
+PROVIDER_ENDPOINTS = {
+ 'google': {
+ 'authorize_url': 'https://accounts.google.com/o/oauth2/v2/auth',
+ 'token_url': 'https://oauth2.googleapis.com/token',
+ 'userinfo_url': 'https://openidconnect.googleapis.com/v1/userinfo',
+ 'scopes': ['openid', 'email', 'profile'],
+ },
+ 'github': {
+ 'authorize_url': 'https://github.com/login/oauth/authorize',
+ 'token_url': 'https://github.com/login/oauth/access_token',
+ 'userinfo_url': 'https://api.github.com/user',
+ 'emails_url': 'https://api.github.com/user/emails',
+ 'scopes': ['read:user', 'user:email'],
+ },
+}
+
+
+def _get_fernet():
+ """Derive a Fernet key from SECRET_KEY."""
+ key_bytes = current_app.config['SECRET_KEY'].encode('utf-8')
+ digest = hashlib.sha256(key_bytes).digest()
+ return Fernet(base64.urlsafe_b64encode(digest))
+
+
+def encrypt_token(token):
+ if not token:
+ return None
+ return _get_fernet().encrypt(token.encode('utf-8')).decode('utf-8')
+
+
+def decrypt_token(encrypted):
+ if not encrypted:
+ return None
+ try:
+ return _get_fernet().decrypt(encrypted.encode('utf-8')).decode('utf-8')
+ except Exception:
+ return None
+
+
+def get_enabled_providers():
+ """Return list of enabled SSO providers for the login page."""
+ providers = []
+ if SettingsService.get('sso_google_enabled', False):
+ providers.append({'id': 'google', 'name': 'Google'})
+ if SettingsService.get('sso_github_enabled', False):
+ providers.append({'id': 'github', 'name': 'GitHub'})
+ if SettingsService.get('sso_oidc_enabled', False):
+ name = SettingsService.get('sso_oidc_provider_name', '') or 'OIDC'
+ providers.append({'id': 'oidc', 'name': name})
+ if SettingsService.get('sso_saml_enabled', False):
+ providers.append({'id': 'saml', 'name': 'SAML'})
+ return providers
+
+
+def is_password_login_allowed():
+ return not SettingsService.get('sso_force_sso', False)
+
+
+def get_provider_config(provider):
+ """Full config for a provider (internal use — includes secrets)."""
+ prefix = f'sso_{provider}_'
+ keys = [k for k in SettingsService.DEFAULT_SETTINGS if k.startswith(prefix)]
+ cfg = {}
+ for k in keys:
+ short = k[len(prefix):]
+ cfg[short] = SettingsService.get(k, SettingsService.DEFAULT_SETTINGS[k]['value'])
+ return cfg
+
+
+# ------------------------------------------------------------------
+# OAuth flow helpers
+# ------------------------------------------------------------------
+
+def generate_auth_url(provider, redirect_uri):
+ """Build the OAuth authorize URL with PKCE, return (auth_url, state)."""
+ cfg = get_provider_config(provider)
+
+ if provider in ('google', 'github'):
+ endpoints = PROVIDER_ENDPOINTS[provider]
+ authorize_url = endpoints['authorize_url']
+ scopes = endpoints['scopes']
+ client_id = cfg.get('client_id', '')
+ elif provider == 'oidc':
+ discovery = _fetch_oidc_discovery(cfg.get('discovery_url', ''))
+ authorize_url = discovery.get('authorization_endpoint', '')
+ scopes = ['openid', 'email', 'profile']
+ client_id = cfg.get('client_id', '')
+ else:
+ raise ValueError(f'OAuth authorize not supported for {provider}')
+
+ state = secrets.token_urlsafe(32)
+ code_verifier = secrets.token_urlsafe(64)
+ code_challenge = base64.urlsafe_b64encode(
+ hashlib.sha256(code_verifier.encode('ascii')).digest()
+ ).rstrip(b'=').decode('ascii')
+
+ # Store in server-side session
+ session['sso_state'] = state
+ session['sso_code_verifier'] = code_verifier
+ session['sso_provider'] = provider
+
+ params = {
+ 'client_id': client_id,
+ 'redirect_uri': redirect_uri,
+ 'response_type': 'code',
+ 'scope': ' '.join(scopes),
+ 'state': state,
+ 'code_challenge': code_challenge,
+ 'code_challenge_method': 'S256',
+ }
+
+ if provider == 'google':
+ params['access_type'] = 'offline'
+ params['prompt'] = 'select_account'
+
+ qs = '&'.join(f'{k}={http_requests.utils.quote(str(v), safe="")}' for k, v in params.items())
+ return f'{authorize_url}?{qs}', state
+
+
+def handle_oauth_callback(provider, code, state, redirect_uri):
+ """Exchange authorization code for tokens & fetch user profile."""
+ # Validate state
+ expected_state = session.pop('sso_state', None)
+ code_verifier = session.pop('sso_code_verifier', None)
+ if not expected_state or state != expected_state:
+ raise ValueError('Invalid OAuth state — possible CSRF')
+
+ cfg = get_provider_config(provider)
+
+ if provider in ('google', 'github'):
+ endpoints = PROVIDER_ENDPOINTS[provider]
+ token_url = endpoints['token_url']
+ userinfo_url = endpoints['userinfo_url']
+ client_id = cfg.get('client_id', '')
+ client_secret = cfg.get('client_secret', '')
+ elif provider == 'oidc':
+ discovery = _fetch_oidc_discovery(cfg.get('discovery_url', ''))
+ token_url = discovery.get('token_endpoint', '')
+ userinfo_url = discovery.get('userinfo_endpoint', '')
+ client_id = cfg.get('client_id', '')
+ client_secret = cfg.get('client_secret', '')
+ else:
+ raise ValueError(f'OAuth callback not supported for {provider}')
+
+ # Exchange code for tokens
+ oauth = OAuth2Session(
+ client_id=client_id,
+ client_secret=client_secret,
+ code_challenge_method='S256',
+ )
+ token_resp = oauth.fetch_token(
+ token_url,
+ code=code,
+ redirect_uri=redirect_uri,
+ code_verifier=code_verifier,
+ )
+
+ access_token = token_resp.get('access_token', '')
+ refresh_tok = token_resp.get('refresh_token')
+
+ # Fetch user info
+ headers = {'Authorization': f'Bearer {access_token}'}
+ if provider == 'github':
+ headers['Accept'] = 'application/vnd.github+json'
+
+ resp = http_requests.get(userinfo_url, headers=headers, timeout=10)
+ resp.raise_for_status()
+ info = resp.json()
+
+ profile = _normalize_profile(provider, info, headers)
+ profile['_tokens'] = {
+ 'access_token': access_token,
+ 'refresh_token': refresh_tok,
+ 'expires_at': token_resp.get('expires_at'),
+ }
+ return profile
+
+
+def _normalize_profile(provider, info, headers=None):
+ """Convert provider-specific userinfo into a standard dict."""
+ if provider == 'google':
+ return {
+ 'provider_user_id': info.get('sub', ''),
+ 'email': info.get('email', ''),
+ 'display_name': info.get('name', ''),
+ }
+ elif provider == 'github':
+ email = info.get('email') or ''
+ if not email and headers:
+ # GitHub may not include email in profile; fetch from /user/emails
+ try:
+ emails_url = PROVIDER_ENDPOINTS['github']['emails_url']
+ r = http_requests.get(emails_url, headers=headers, timeout=10)
+ r.raise_for_status()
+ for e in r.json():
+ if e.get('primary') and e.get('verified'):
+ email = e['email']
+ break
+ except Exception:
+ pass
+ return {
+ 'provider_user_id': str(info.get('id', '')),
+ 'email': email,
+ 'display_name': info.get('name') or info.get('login', ''),
+ }
+ else:
+ # Generic OIDC
+ return {
+ 'provider_user_id': info.get('sub', ''),
+ 'email': info.get('email', ''),
+ 'display_name': info.get('name', ''),
+ }
+
+
+# ------------------------------------------------------------------
+# SAML helpers
+# ------------------------------------------------------------------
+
+def get_saml_settings(provider_config, request_data):
+ """Build python3-saml settings dict."""
+ sp_entity_id = provider_config.get('entity_id', '') or request_data.get('sp_entity_id', '')
+ acs_url = request_data.get('acs_url', '')
+
+ return {
+ 'strict': True,
+ 'debug': False,
+ 'sp': {
+ 'entityId': sp_entity_id,
+ 'assertionConsumerService': {
+ 'url': acs_url,
+ 'binding': 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST',
+ },
+ 'NameIDFormat': 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress',
+ },
+ 'idp': {
+ 'entityId': provider_config.get('entity_id', ''),  # FIXME: reuses the SP entity ID setting; IdP entityId should come from IdP metadata
+ 'singleSignOnService': {
+ 'url': provider_config.get('idp_sso_url', ''),
+ 'binding': 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect',
+ },
+ 'x509cert': provider_config.get('idp_cert', ''),
+ },
+ }
+
+
+def handle_saml_callback(saml_response_data, request_data):
+ """Validate SAML response and extract user profile."""
+ try:
+ from onelogin.saml2.auth import OneLogin_Saml2_Auth
+ except ImportError:
+ raise RuntimeError('python3-saml is not installed')
+
+ cfg = get_provider_config('saml')
+ saml_settings = get_saml_settings(cfg, request_data)
+
+ saml_req = {
+ 'https': 'on' if request_data.get('https') else 'off',
+ 'http_host': request_data.get('http_host', ''),
+ 'script_name': request_data.get('script_name', ''),
+ 'post_data': saml_response_data,
+ }
+
+ auth = OneLogin_Saml2_Auth(saml_req, saml_settings)
+ auth.process_response()
+
+ errors = auth.get_errors()
+ if errors:
+ raise ValueError(f'SAML validation failed: {", ".join(errors)}')
+
+ if not auth.is_authenticated():
+ raise ValueError('SAML authentication failed')
+
+ attrs = auth.get_attributes()
+ name_id = auth.get_nameid()
+
+ return {
+ 'provider_user_id': name_id,
+ 'email': attrs.get('email', [name_id])[0] if attrs.get('email') else name_id,
+ 'display_name': attrs.get('displayName', [''])[0] if attrs.get('displayName') else '',
+ '_tokens': {},
+ }
+
+
+# ------------------------------------------------------------------
+# User linking / provisioning
+# ------------------------------------------------------------------
+
+def find_or_create_user(provider, profile):
+ """
+ 1. Check OAuthIdentity by (provider, provider_user_id) → return linked user
+ 2. Check User by email → auto-link identity
+ 3. Auto-provision if enabled
+ Returns (user, is_new_user).
+ """
+ email = profile.get('email', '').lower().strip()
+
+ # Enforce allowed domains
+ allowed_domains = SettingsService.get('sso_allowed_domains', [])
+ if allowed_domains and email:
+ domain = email.split('@')[-1] if '@' in email else ''
+ if domain not in allowed_domains:
+ raise ValueError(f'Email domain @{domain} is not allowed for SSO login')
+
+ # 1. Check existing identity link
+ identity = OAuthIdentity.query.filter_by(
+ provider=provider,
+ provider_user_id=profile['provider_user_id'],
+ ).first()
+
+ if identity:
+ user = identity.user
+ if not user.is_active:
+ raise ValueError('Account is deactivated')
+ identity.last_login_at = datetime.utcnow()
+ _update_identity_tokens(identity, profile.get('_tokens', {}))
+ db.session.commit()
+ return user, False
+
+ # 2. Check existing user by email
+ user = User.query.filter_by(email=email).first() if email else None
+ if user:
+ if not user.is_active:
+ raise ValueError('Account is deactivated')
+ link_identity(user.id, provider, profile, profile.get('_tokens', {}))
+ return user, False
+
+ # 3. Auto-provision
+ if not SettingsService.get('sso_auto_provision', True):
+ raise ValueError('No matching account found and auto-provisioning is disabled')
+
+ if not email:
+ raise ValueError('SSO provider did not return an email address')
+
+ default_role = SettingsService.get('sso_default_role', 'developer')
+ username = _generate_username(email, profile.get('display_name', ''))
+
+ user = User(
+ email=email,
+ username=username,
+ role=default_role,
+ auth_provider=provider,
+ )
+ db.session.add(user)
+ db.session.flush()
+
+ link_identity(user.id, provider, profile, profile.get('_tokens', {}))
+
+ AuditLog.log(
+ action=AuditLog.ACTION_SSO_PROVISION,
+ user_id=user.id,
+ target_type='user',
+ target_id=user.id,
+ details={'provider': provider, 'email': email, 'role': default_role},
+ )
+ db.session.commit()
+ return user, True
+
+
+def link_identity(user_id, provider, profile, tokens=None):
+ """Create an OAuthIdentity record."""
+ tokens = tokens or {}
+ identity = OAuthIdentity(
+ user_id=user_id,
+ provider=provider,
+ provider_user_id=profile['provider_user_id'],
+ provider_email=profile.get('email'),
+ provider_display_name=profile.get('display_name'),
+ last_login_at=datetime.utcnow(),
+ )
+ _update_identity_tokens(identity, tokens)
+ db.session.add(identity)
+
+ AuditLog.log(
+ action=AuditLog.ACTION_SSO_LINK,
+ user_id=user_id,
+ target_type='user',
+ target_id=user_id,
+ details={'provider': provider},
+ )
+ db.session.commit()
+ return identity
+
+
+def unlink_identity(user_id, provider):
+ """Remove an OAuth identity link (prevent if it's the only auth method)."""
+ user = User.query.get(user_id)
+ if not user:
+ raise ValueError('User not found')
+
+ identity = OAuthIdentity.query.filter_by(user_id=user_id, provider=provider).first()
+ if not identity:
+ raise ValueError(f'No {provider} identity linked')
+
+ # Prevent unlinking if it's the only auth method
+ identity_count = OAuthIdentity.query.filter_by(user_id=user_id).count()
+ if not user.has_password and identity_count <= 1:
+ raise ValueError('Cannot unlink the only authentication method. Set a password first.')
+
+ db.session.delete(identity)
+ AuditLog.log(
+ action=AuditLog.ACTION_SSO_UNLINK,
+ user_id=user_id,
+ target_type='user',
+ target_id=user_id,
+ details={'provider': provider},
+ )
+ db.session.commit()
+
+
+# ------------------------------------------------------------------
+# Internal helpers
+# ------------------------------------------------------------------
+
+def _update_identity_tokens(identity, tokens):
+ if tokens.get('access_token'):
+ identity.access_token_encrypted = encrypt_token(tokens['access_token'])
+ if tokens.get('refresh_token'):
+ identity.refresh_token_encrypted = encrypt_token(tokens['refresh_token'])
+ if tokens.get('expires_at'):
+ try:
+ identity.token_expires_at = datetime.utcfromtimestamp(float(tokens['expires_at']))
+ except (ValueError, TypeError):
+ pass
+
+
+def _generate_username(email, display_name):
+ """Generate a unique username from email or display name."""
+ base = display_name.strip().lower().replace(' ', '_') if display_name else email.split('@')[0]
+ # Remove non-alphanumeric except underscores
+ base = ''.join(c for c in base if c.isalnum() or c == '_')[:60]
+ if not base:
+ base = 'user'
+
+ username = base
+ suffix = 1
+ while User.query.filter_by(username=username).first():
+ username = f'{base}_{suffix}'
+ suffix += 1
+ return username
+
+
+def _fetch_oidc_discovery(discovery_url):
+ """Fetch and cache OIDC discovery document."""
+ if not discovery_url:
+ raise ValueError('OIDC discovery URL not configured')
+ resp = http_requests.get(discovery_url, timeout=10)
+ resp.raise_for_status()
+ return resp.json()
+
+
+def test_provider_connectivity(provider):
+ """Test that a provider's endpoints are reachable."""
+ cfg = get_provider_config(provider)
+
+ if provider == 'google':
+ client_id = cfg.get('client_id', '')
+ if not client_id:
+ return {'ok': False, 'error': 'Client ID not configured'}
+ # Google discovery is always available
+ return {'ok': True, 'message': 'Google OAuth endpoints reachable'}
+ elif provider == 'github':
+ client_id = cfg.get('client_id', '')
+ if not client_id:
+ return {'ok': False, 'error': 'Client ID not configured'}
+ return {'ok': True, 'message': 'GitHub OAuth endpoints reachable'}
+ elif provider == 'oidc':
+ try:
+ discovery = _fetch_oidc_discovery(cfg.get('discovery_url', ''))
+ if 'authorization_endpoint' in discovery:
+ return {'ok': True, 'message': 'OIDC discovery successful'}
+ return {'ok': False, 'error': 'Discovery document missing authorization_endpoint'}
+ except Exception as e:
+ return {'ok': False, 'error': str(e)}
+ elif provider == 'saml':
+ idp_sso_url = cfg.get('idp_sso_url', '')
+ if not idp_sso_url:
+ return {'ok': False, 'error': 'IdP SSO URL not configured'}
+ idp_cert = cfg.get('idp_cert', '')
+ if not idp_cert:
+ return {'ok': False, 'error': 'IdP certificate not configured'}
+ return {'ok': True, 'message': 'SAML configuration looks valid'}
+ else:
+ return {'ok': False, 'error': f'Unknown provider: {provider}'}
diff --git a/backend/cli.py b/backend/cli.py
index caa4a39..13deb2e 100644
--- a/backend/cli.py
+++ b/backend/cli.py
@@ -206,80 +206,102 @@ def generate_keys():
@cli.command()
def init_db():
- """Initialize the database."""
+ """Initialize the database using Alembic migrations."""
app = create_app()
with app.app_context():
- db.create_all()
- click.echo(click.style('Database initialized successfully!', fg='green'))
+ from app.services.migration_service import MigrationService
+ result = MigrationService.apply_migrations(app)
+ if result['success']:
+ click.echo(click.style(f'Database initialized successfully (revision: {result["revision"]})!', fg='green'))
+ else:
+ click.echo(click.style(f'Database initialization failed: {result["error"]}', fg='red'))
+ sys.exit(1)
@cli.command()
-def migrate_db():
- """Apply database migrations for missing columns."""
+def db_status():
+ """Show current database migration status."""
app = create_app()
with app.app_context():
- from sqlalchemy import text, inspect
-
- inspector = inspect(db.engine)
- existing_tables = inspector.get_table_names()
-
- # Define all expected columns per table
- expected_columns = [
- # applications table
- ('applications', 'private_slug', 'VARCHAR(50)'),
- ('applications', 'private_url_enabled', 'BOOLEAN DEFAULT 0'),
- ('applications', 'environment_type', "VARCHAR(20) DEFAULT 'standalone'"),
- ('applications', 'linked_app_id', 'INTEGER'),
- ('applications', 'shared_config', 'TEXT'),
- # wordpress_sites table
- ('wordpress_sites', 'environment_type', "VARCHAR(20) DEFAULT 'standalone'"),
- ('wordpress_sites', 'multidev_branch', 'VARCHAR(200)'),
- ('wordpress_sites', 'is_locked', 'BOOLEAN DEFAULT 0'),
- ('wordpress_sites', 'locked_by', 'VARCHAR(100)'),
- ('wordpress_sites', 'locked_reason', 'VARCHAR(200)'),
- ('wordpress_sites', 'lock_expires_at', 'DATETIME'),
- ('wordpress_sites', 'compose_project_name', 'VARCHAR(100)'),
- ('wordpress_sites', 'container_prefix', 'VARCHAR(100)'),
- ('wordpress_sites', 'resource_limits', 'TEXT'),
- ('wordpress_sites', 'basic_auth_enabled', 'BOOLEAN DEFAULT 0'),
- ('wordpress_sites', 'basic_auth_user', 'VARCHAR(100)'),
- ('wordpress_sites', 'basic_auth_password_hash', 'VARCHAR(200)'),
- ('wordpress_sites', 'health_status', "VARCHAR(20) DEFAULT 'unknown'"),
- ('wordpress_sites', 'last_health_check', 'DATETIME'),
- ('wordpress_sites', 'disk_usage_bytes', 'BIGINT DEFAULT 0'),
- ('wordpress_sites', 'disk_usage_updated_at', 'DATETIME'),
- ('wordpress_sites', 'auto_sync_schedule', 'VARCHAR(100)'),
- ('wordpress_sites', 'auto_sync_enabled', 'BOOLEAN DEFAULT 0'),
- ]
+ from app.services.migration_service import MigrationService
+ status = MigrationService.get_status()
+
+ click.echo(f"\nCurrent revision: {status['current_revision'] or 'none'}")
+ click.echo(f"Head revision: {status['head_revision'] or 'none'}")
+ click.echo(f"Pending: {status['pending_count']}")
+
+ if status['pending_migrations']:
+ click.echo(f"\nPending migrations:")
+ for m in status['pending_migrations']:
+ click.echo(f" - {m['revision']}: {m['description']}")
+ else:
+ click.echo(click.style('\nDatabase is up to date.', fg='green'))
+ click.echo()
- # Check which columns are missing
- table_columns_cache = {}
- migrations = []
- for table, column, col_type in expected_columns:
- if table not in existing_tables:
- continue
- if table not in table_columns_cache:
- table_columns_cache[table] = [col['name'] for col in inspector.get_columns(table)]
- if column not in table_columns_cache[table]:
- migrations.append((table, column, col_type))
+@cli.command()
+@click.option('--no-backup', is_flag=True, help='Skip creating a backup before migrating')
+def db_migrate(no_backup):
+ """Apply pending database migrations."""
+ app = create_app()
+ with app.app_context():
+ from app.services.migration_service import MigrationService
+ status = MigrationService.get_status()
- if not migrations:
+ if not status['needs_migration']:
click.echo(click.style('Database is up to date. No migrations needed.', fg='green'))
return
- click.echo(f'Found {len(migrations)} migration(s) to apply...')
+ click.echo(f'Found {status["pending_count"]} pending migration(s):')
+ for m in status['pending_migrations']:
+ click.echo(f' - {m["revision"]}: {m["description"]}')
+
+ if not no_backup:
+ click.echo('\nCreating backup...')
+ backup = MigrationService.create_backup(app)
+ if backup['success']:
+ click.echo(click.style(f' Backup saved to: {backup["path"]}', fg='green'))
+ else:
+ click.echo(click.style(f' Backup failed: {backup["error"]}', fg='red'))
+ if not click.confirm('Continue without backup?'):
+ return
+
+ click.echo('\nApplying migrations...')
+ result = MigrationService.apply_migrations(app)
+ if result['success']:
+ click.echo(click.style(f'\nMigrations applied! Now at revision: {result["revision"]}', fg='green'))
+ else:
+ click.echo(click.style(f'\nMigration failed: {result["error"]}', fg='red'))
+ sys.exit(1)
- for table, column, col_type in migrations:
- try:
- sql = f'ALTER TABLE {table} ADD COLUMN {column} {col_type}'
- db.session.execute(text(sql))
- click.echo(click.style(f' ✓ Added column {table}.{column}', fg='green'))
- except Exception as e:
- click.echo(click.style(f' ✗ Failed to add {table}.{column}: {e}', fg='red'))
- db.session.commit()
- click.echo(click.style('\nMigrations completed!', fg='green'))
+@cli.command()
+def db_history():
+ """Show all database migration revisions."""
+ app = create_app()
+ with app.app_context():
+ from app.services.migration_service import MigrationService
+ history = MigrationService.get_migration_history(app)
+
+ if not history:
+ click.echo('No migration history found.')
+ return
+
+ click.echo(f"\n{'Revision':<20} {'Description':<50} {'Status'}")
+ click.echo('-' * 80)
+
+ for rev in history:
+ status_parts = []
+ if rev['is_current']:
+ status_parts.append('CURRENT')
+ if rev['is_head']:
+ status_parts.append('HEAD')
+ status = ', '.join(status_parts) if status_parts else ''
+
+ desc = rev['description'][:48] if rev['description'] else ''
+ click.echo(f"{rev['revision']:<20} {desc:<50} {status}")
+
+ click.echo()
@cli.command()
@@ -542,11 +564,15 @@ def factory_reset():
except Exception as e:
click.echo(click.style(f'✗ Failed to clear template cache: {e}', fg='red'))
- # 7. Drop and recreate database
+ # 7. Drop and recreate database via Alembic. NOTE(review): db.drop_all() does not drop alembic_version — confirm apply_migrations() actually re-runs after a reset rather than seeing the DB as already at head.
try:
db.drop_all()
- db.create_all()
- click.echo(click.style('✓ Reset database', fg='green'))
+ from app.services.migration_service import MigrationService
+ result = MigrationService.apply_migrations(app)
+ if result['success']:
+ click.echo(click.style('✓ Reset database', fg='green'))
+ else:
+ click.echo(click.style(f'✗ Migration after reset failed: {result["error"]}', fg='red'))
except Exception as e:
click.echo(click.style(f'✗ Failed to reset database: {e}', fg='red'))
diff --git a/backend/migrations/alembic.ini b/backend/migrations/alembic.ini
new file mode 100644
index 0000000..5f8ee17
--- /dev/null
+++ b/backend/migrations/alembic.ini
@@ -0,0 +1,9 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
diff --git a/backend/migrations/env.py b/backend/migrations/env.py
new file mode 100644
index 0000000..8390ac2
--- /dev/null
+++ b/backend/migrations/env.py
@@ -0,0 +1,100 @@
+import logging
+from logging.config import fileConfig
+
+from flask import current_app
+from alembic import context
+
+# Alembic Config object
+config = context.config
+
+# Set up loggers
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+
+def get_engine():
+ try:
+ # Flask-SQLAlchemy < 3 (and Alchemical): db.get_engine() is available
+ return current_app.extensions['migrate'].db.get_engine()
+ except (TypeError, AttributeError):
+ # Flask-SQLAlchemy >= 3: get_engine() raises TypeError; use the .engine attribute
+ return current_app.extensions['migrate'].db.engine
+
+
+def get_engine_url():
+ try:
+ return get_engine().url.render_as_string(hide_password=False).replace('%', '%%')
+ except AttributeError:
+ return str(get_engine().url).replace('%', '%%')
+
+
+# Import all models so Alembic can detect them
+def import_models():
+ # noinspection PyUnresolvedReferences
+ from app.models import ( # noqa: F401
+ User, Application, Domain, EnvironmentVariable, EnvironmentVariableHistory,
+ NotificationPreferences, Deployment, DeploymentDiff, SystemSettings, AuditLog,
+ MetricsHistory, Workflow, GitWebhook, WebhookLog, GitDeployment,
+ Server, ServerGroup, ServerMetrics, ServerCommand, AgentSession, SecurityAlert,
+ WordPressSite, DatabaseSnapshot, SyncJob,
+ EnvironmentActivity, PromotionJob, SanitizationProfile, EmailAccount,
+ OAuthIdentity
+ )
+
+
+config.set_main_option('sqlalchemy.url', get_engine_url())
+target_db = current_app.extensions['migrate'].db
+
+
+def get_metadata():
+ if hasattr(target_db, 'metadatas'):
+ return target_db.metadatas[None]
+ return target_db.metadata
+
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode."""
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=get_metadata(),
+ literal_binds=True,
+ )
+
+ import_models()
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online():
+ """Run migrations in 'online' mode."""
+
+ def process_revision_directives(context, revision, directives):
+ if getattr(config.cmd_opts, 'autogenerate', False):
+ script = directives[0]
+ if script.upgrade_ops.is_empty():
+ directives[:] = []
+ logger.info('No changes in schema detected.')
+
+ connectable = get_engine()
+
+ import_models()
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection,
+ target_metadata=get_metadata(),
+ process_revision_directives=process_revision_directives,
+ render_as_batch=True, # Required for SQLite ALTER TABLE support
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/backend/migrations/script.py.mako b/backend/migrations/script.py.mako
new file mode 100644
index 0000000..2c01563
--- /dev/null
+++ b/backend/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
diff --git a/backend/migrations/versions/001_baseline.py b/backend/migrations/versions/001_baseline.py
new file mode 100644
index 0000000..b751633
--- /dev/null
+++ b/backend/migrations/versions/001_baseline.py
@@ -0,0 +1,601 @@
+"""Baseline migration capturing full schema.
+
+Revision ID: 001_baseline
+Revises:
+Create Date: 2026-03-04
+
+For fresh installs: creates all tables from scratch.
+For existing DBs: acts as a stamp point (tables already exist).
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = '001_baseline'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # Check whether each table already exists so this migration serves both
+ # fresh installs (creates all tables) and existing databases (acts as a stamp point).
+ conn = op.get_bind()
+ inspector = sa.inspect(conn)
+ existing_tables = inspector.get_table_names()
+
+ if 'users' not in existing_tables:
+ op.create_table('users',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('email', sa.String(120), unique=True, nullable=False, index=True),
+ sa.Column('username', sa.String(80), unique=True, nullable=False, index=True),
+ sa.Column('password_hash', sa.String(256), nullable=True),
+ sa.Column('auth_provider', sa.String(50), server_default='local'),
+ sa.Column('role', sa.String(20), server_default='developer'),
+ sa.Column('is_active', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('last_login_at', sa.DateTime(), nullable=True),
+ sa.Column('created_by', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ sa.Column('failed_login_count', sa.Integer(), server_default='0'),
+ sa.Column('locked_until', sa.DateTime(), nullable=True),
+ sa.Column('totp_secret', sa.String(32), nullable=True),
+ sa.Column('totp_enabled', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('backup_codes', sa.Text(), nullable=True),
+ sa.Column('totp_confirmed_at', sa.DateTime(), nullable=True),
+ )
+ else:
+ # Add columns that may be missing in existing installs
+ existing_cols = {c['name'] for c in inspector.get_columns('users')}
+ if 'auth_provider' not in existing_cols:
+ with op.batch_alter_table('users') as batch_op:
+ batch_op.add_column(sa.Column('auth_provider', sa.String(50), server_default='local'))
+
+ if 'applications' not in existing_tables:
+ op.create_table('applications',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('name', sa.String(100), nullable=False),
+ sa.Column('app_type', sa.String(50), nullable=False),
+ sa.Column('status', sa.String(20), server_default='stopped'),
+ sa.Column('php_version', sa.String(10), nullable=True),
+ sa.Column('python_version', sa.String(10), nullable=True),
+ sa.Column('port', sa.Integer(), nullable=True),
+ sa.Column('root_path', sa.String(500), nullable=True),
+ sa.Column('docker_image', sa.String(200), nullable=True),
+ sa.Column('container_id', sa.String(100), nullable=True),
+ sa.Column('private_slug', sa.String(50), unique=True, nullable=True, index=True),
+ sa.Column('private_url_enabled', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('environment_type', sa.String(20), server_default='standalone'),
+ sa.Column('linked_app_id', sa.Integer(), sa.ForeignKey('applications.id'), nullable=True),
+ sa.Column('shared_config', sa.Text(), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('last_deployed_at', sa.DateTime(), nullable=True),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False),
+ )
+
+ if 'domains' not in existing_tables:
+ op.create_table('domains',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('name', sa.String(255), unique=True, nullable=False, index=True),
+ sa.Column('is_primary', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('ssl_enabled', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('ssl_certificate_path', sa.String(500), nullable=True),
+ sa.Column('ssl_key_path', sa.String(500), nullable=True),
+ sa.Column('ssl_expires_at', sa.DateTime(), nullable=True),
+ sa.Column('ssl_auto_renew', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('application_id', sa.Integer(), sa.ForeignKey('applications.id'), nullable=False),
+ )
+
+ if 'environment_variables' not in existing_tables:
+ op.create_table('environment_variables',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('application_id', sa.Integer(), sa.ForeignKey('applications.id'), nullable=False),
+ sa.Column('key', sa.String(255), nullable=False),
+ sa.Column('encrypted_value', sa.Text(), nullable=False),
+ sa.Column('is_secret', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('description', sa.String(500), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('created_by', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ sa.UniqueConstraint('application_id', 'key', name='unique_app_env_key'),
+ )
+
+ if 'environment_variable_history' not in existing_tables:
+ op.create_table('environment_variable_history',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('env_variable_id', sa.Integer(), nullable=False),
+ sa.Column('application_id', sa.Integer(), sa.ForeignKey('applications.id'), nullable=False),
+ sa.Column('key', sa.String(255), nullable=False),
+ sa.Column('action', sa.String(20), nullable=False),
+ sa.Column('old_value_hash', sa.String(64), nullable=True),
+ sa.Column('new_value_hash', sa.String(64), nullable=True),
+ sa.Column('changed_by', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ sa.Column('changed_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'notification_preferences' not in existing_tables:
+ op.create_table('notification_preferences',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), unique=True, nullable=False),
+ sa.Column('enabled', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('channels', sa.Text(), server_default='["email"]'),
+ sa.Column('severities', sa.Text(), server_default='["critical", "warning"]'),
+ sa.Column('email', sa.String(255), nullable=True),
+ sa.Column('discord_webhook', sa.String(512), nullable=True),
+ sa.Column('telegram_chat_id', sa.String(64), nullable=True),
+ sa.Column('categories', sa.Text(), server_default='{"system": true, "security": true, "backups": true, "apps": true}'),
+ sa.Column('quiet_hours_enabled', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('quiet_hours_start', sa.String(5), server_default='22:00'),
+ sa.Column('quiet_hours_end', sa.String(5), server_default='08:00'),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'deployments' not in existing_tables:
+ op.create_table('deployments',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('app_id', sa.Integer(), sa.ForeignKey('applications.id'), nullable=False),
+ sa.Column('version', sa.Integer(), nullable=False),
+ sa.Column('version_tag', sa.String(100), nullable=True),
+ sa.Column('status', sa.String(20), server_default='pending'),
+ sa.Column('build_method', sa.String(20), nullable=True),
+ sa.Column('image_tag', sa.String(255), nullable=True),
+ sa.Column('commit_hash', sa.String(40), nullable=True),
+ sa.Column('commit_message', sa.Text(), nullable=True),
+ sa.Column('container_id', sa.String(100), nullable=True),
+ sa.Column('deployed_by', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ sa.Column('deploy_trigger', sa.String(20), server_default='manual'),
+ sa.Column('build_log_path', sa.String(500), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('build_started_at', sa.DateTime(), nullable=True),
+ sa.Column('build_completed_at', sa.DateTime(), nullable=True),
+ sa.Column('deploy_started_at', sa.DateTime(), nullable=True),
+ sa.Column('deploy_completed_at', sa.DateTime(), nullable=True),
+ sa.Column('error_message', sa.Text(), nullable=True),
+ sa.Column('extra_data', sa.Text(), server_default='{}'),
+ )
+
+ if 'deployment_diffs' not in existing_tables:
+ op.create_table('deployment_diffs',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('deployment_id', sa.Integer(), sa.ForeignKey('deployments.id'), nullable=False),
+ sa.Column('previous_deployment_id', sa.Integer(), sa.ForeignKey('deployments.id'), nullable=True),
+ sa.Column('files_added', sa.Text(), server_default='[]'),
+ sa.Column('files_removed', sa.Text(), server_default='[]'),
+ sa.Column('files_modified', sa.Text(), server_default='[]'),
+ sa.Column('additions', sa.Integer(), server_default='0'),
+ sa.Column('deletions', sa.Integer(), server_default='0'),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'system_settings' not in existing_tables:
+ op.create_table('system_settings',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('key', sa.String(100), unique=True, nullable=False, index=True),
+ sa.Column('value', sa.Text(), nullable=True),
+ sa.Column('value_type', sa.String(20), server_default='string'),
+ sa.Column('description', sa.String(500), nullable=True),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_by', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ )
+
+ if 'audit_logs' not in existing_tables:
+ op.create_table('audit_logs',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('action', sa.String(100), nullable=False, index=True),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ sa.Column('target_type', sa.String(50), nullable=True),
+ sa.Column('target_id', sa.Integer(), nullable=True),
+ sa.Column('details', sa.Text(), nullable=True),
+ sa.Column('ip_address', sa.String(45), nullable=True),
+ sa.Column('user_agent', sa.String(500), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), index=True),
+ )
+
+ if 'metrics_history' not in existing_tables:
+ op.create_table('metrics_history',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('timestamp', sa.DateTime(), nullable=False, index=True),
+ sa.Column('level', sa.String(10), nullable=False, server_default='minute', index=True),
+ sa.Column('cpu_percent', sa.Float(), nullable=False),
+ sa.Column('cpu_percent_min', sa.Float(), nullable=True),
+ sa.Column('cpu_percent_max', sa.Float(), nullable=True),
+ sa.Column('memory_percent', sa.Float(), nullable=False),
+ sa.Column('memory_used_bytes', sa.BigInteger(), nullable=False),
+ sa.Column('memory_total_bytes', sa.BigInteger(), nullable=False),
+ sa.Column('disk_percent', sa.Float(), nullable=False),
+ sa.Column('disk_used_bytes', sa.BigInteger(), nullable=False),
+ sa.Column('disk_total_bytes', sa.BigInteger(), nullable=False),
+ sa.Column('load_1m', sa.Float(), nullable=True),
+ sa.Column('load_5m', sa.Float(), nullable=True),
+ sa.Column('load_15m', sa.Float(), nullable=True),
+ sa.Column('sample_count', sa.Integer(), server_default='1'),
+ )
+ op.create_index('idx_metrics_level_timestamp', 'metrics_history', ['level', 'timestamp'])
+
+ if 'workflows' not in existing_tables:
+ op.create_table('workflows',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('name', sa.String(100), nullable=False),
+ sa.Column('description', sa.Text(), nullable=True),
+ sa.Column('nodes', sa.Text(), nullable=True),
+ sa.Column('edges', sa.Text(), nullable=True),
+ sa.Column('viewport', sa.Text(), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False),
+ )
+
+ if 'git_webhooks' not in existing_tables:
+ op.create_table('git_webhooks',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('name', sa.String(100), nullable=False),
+ sa.Column('source', sa.String(50), nullable=False),
+ sa.Column('source_repo_url', sa.String(500), nullable=False),
+ sa.Column('source_branch', sa.String(100), server_default='main'),
+ sa.Column('local_repo_name', sa.String(200), nullable=True),
+ sa.Column('secret', sa.String(100), nullable=False),
+ sa.Column('webhook_token', sa.String(50), nullable=False, unique=True),
+ sa.Column('sync_direction', sa.String(20), server_default='pull'),
+ sa.Column('auto_sync', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('app_id', sa.Integer(), sa.ForeignKey('applications.id'), nullable=True),
+ sa.Column('deploy_on_push', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('pre_deploy_script', sa.Text(), nullable=True),
+ sa.Column('post_deploy_script', sa.Text(), nullable=True),
+ sa.Column('zero_downtime', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('is_active', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('last_sync_at', sa.DateTime(), nullable=True),
+ sa.Column('last_sync_status', sa.String(20), nullable=True),
+ sa.Column('last_sync_message', sa.Text(), nullable=True),
+ sa.Column('sync_count', sa.Integer(), server_default='0'),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'webhook_logs' not in existing_tables:
+ op.create_table('webhook_logs',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('webhook_id', sa.Integer(), sa.ForeignKey('git_webhooks.id'), nullable=True),
+ sa.Column('source', sa.String(50), nullable=False),
+ sa.Column('event_type', sa.String(50), nullable=False),
+ sa.Column('delivery_id', sa.String(100), nullable=True),
+ sa.Column('ref', sa.String(200), nullable=True),
+ sa.Column('commit_sha', sa.String(64), nullable=True),
+ sa.Column('commit_message', sa.Text(), nullable=True),
+ sa.Column('pusher', sa.String(100), nullable=True),
+ sa.Column('status', sa.String(20), server_default='received'),
+ sa.Column('status_message', sa.Text(), nullable=True),
+ sa.Column('headers_json', sa.Text(), nullable=True),
+ sa.Column('payload_preview', sa.Text(), nullable=True),
+ sa.Column('received_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('processed_at', sa.DateTime(), nullable=True),
+ )
+
+ if 'git_deployments' not in existing_tables:
+ op.create_table('git_deployments',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('app_id', sa.Integer(), sa.ForeignKey('applications.id'), nullable=False),
+ sa.Column('webhook_id', sa.Integer(), sa.ForeignKey('git_webhooks.id'), nullable=True),
+ sa.Column('version', sa.Integer(), nullable=False),
+ sa.Column('commit_sha', sa.String(64), nullable=True),
+ sa.Column('commit_message', sa.Text(), nullable=True),
+ sa.Column('branch', sa.String(100), nullable=True),
+ sa.Column('triggered_by', sa.String(100), nullable=True),
+ sa.Column('status', sa.String(20), server_default='pending'),
+ sa.Column('started_at', sa.DateTime(), nullable=True),
+ sa.Column('completed_at', sa.DateTime(), nullable=True),
+ sa.Column('duration_seconds', sa.Integer(), nullable=True),
+ sa.Column('pre_script_output', sa.Text(), nullable=True),
+ sa.Column('deploy_output', sa.Text(), nullable=True),
+ sa.Column('post_script_output', sa.Text(), nullable=True),
+ sa.Column('error_message', sa.Text(), nullable=True),
+ sa.Column('is_rollback', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('rollback_from_version', sa.Integer(), nullable=True),
+ sa.Column('rolled_back_at', sa.DateTime(), nullable=True),
+ sa.Column('rolled_back_to_id', sa.Integer(), nullable=True),
+ sa.Column('snapshot_data', sa.Text(), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'server_groups' not in existing_tables:
+ op.create_table('server_groups',
+ sa.Column('id', sa.String(36), primary_key=True),
+ sa.Column('name', sa.String(100), nullable=False),
+ sa.Column('description', sa.Text()),
+ sa.Column('color', sa.String(7), server_default='#6366f1'),
+ sa.Column('icon', sa.String(50), server_default='server'),
+ sa.Column('parent_id', sa.String(36), sa.ForeignKey('server_groups.id'), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'servers' not in existing_tables:
+ op.create_table('servers',
+ sa.Column('id', sa.String(36), primary_key=True),
+ sa.Column('name', sa.String(100), nullable=False),
+ sa.Column('description', sa.Text()),
+ sa.Column('hostname', sa.String(255)),
+ sa.Column('ip_address', sa.String(45)),
+ sa.Column('group_id', sa.String(36), sa.ForeignKey('server_groups.id'), nullable=True),
+ sa.Column('tags', sa.JSON()),
+ sa.Column('status', sa.String(20), server_default='pending'),
+ sa.Column('last_seen', sa.DateTime()),
+ sa.Column('last_error', sa.Text()),
+ sa.Column('agent_version', sa.String(20)),
+ sa.Column('agent_id', sa.String(36), unique=True, index=True),
+ sa.Column('os_type', sa.String(20)),
+ sa.Column('os_version', sa.String(100)),
+ sa.Column('platform', sa.String(100)),
+ sa.Column('architecture', sa.String(20)),
+ sa.Column('cpu_cores', sa.Integer()),
+ sa.Column('cpu_model', sa.String(200)),
+ sa.Column('total_memory', sa.BigInteger()),
+ sa.Column('total_disk', sa.BigInteger()),
+ sa.Column('docker_version', sa.String(50)),
+ sa.Column('api_key_hash', sa.String(256)),
+ sa.Column('api_key_prefix', sa.String(12)),
+ sa.Column('api_secret_encrypted', sa.Text()),
+ sa.Column('permissions', sa.JSON()),
+ sa.Column('allowed_ips', sa.JSON()),
+ sa.Column('api_key_pending_hash', sa.String(256)),
+ sa.Column('api_key_pending_prefix', sa.String(12)),
+ sa.Column('api_secret_pending_encrypted', sa.Text()),
+ sa.Column('api_key_rotation_expires', sa.DateTime()),
+ sa.Column('api_key_rotation_id', sa.String(36)),
+ sa.Column('api_key_last_rotated', sa.DateTime()),
+ sa.Column('registration_token_hash', sa.String(256)),
+ sa.Column('registration_token_expires', sa.DateTime()),
+ sa.Column('registered_at', sa.DateTime()),
+ sa.Column('registered_by', sa.Integer(), sa.ForeignKey('users.id')),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'server_metrics' not in existing_tables:
+ op.create_table('server_metrics',
+ sa.Column('id', sa.BigInteger(), primary_key=True, autoincrement=True),
+ sa.Column('server_id', sa.String(36), sa.ForeignKey('servers.id'), nullable=False, index=True),
+ sa.Column('timestamp', sa.DateTime(), server_default=sa.func.now(), index=True),
+ sa.Column('cpu_percent', sa.Float()),
+ sa.Column('memory_percent', sa.Float()),
+ sa.Column('memory_used', sa.BigInteger()),
+ sa.Column('disk_percent', sa.Float()),
+ sa.Column('disk_used', sa.BigInteger()),
+ sa.Column('network_rx', sa.BigInteger()),
+ sa.Column('network_tx', sa.BigInteger()),
+ sa.Column('network_rx_rate', sa.Float()),
+ sa.Column('network_tx_rate', sa.Float()),
+ sa.Column('container_count', sa.Integer()),
+ sa.Column('container_running', sa.Integer()),
+ sa.Column('extra', sa.JSON()),
+ )
+ op.create_index('ix_server_metrics_server_time', 'server_metrics', ['server_id', 'timestamp'])
+
+ if 'server_commands' not in existing_tables:
+ op.create_table('server_commands',
+ sa.Column('id', sa.String(36), primary_key=True),
+ sa.Column('server_id', sa.String(36), sa.ForeignKey('servers.id'), nullable=False, index=True),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id')),
+ sa.Column('command_type', sa.String(50)),
+ sa.Column('command_data', sa.JSON()),
+ sa.Column('status', sa.String(20), server_default='pending'),
+ sa.Column('started_at', sa.DateTime()),
+ sa.Column('completed_at', sa.DateTime()),
+ sa.Column('result', sa.JSON()),
+ sa.Column('error', sa.Text()),
+ sa.Column('exit_code', sa.Integer()),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'agent_sessions' not in existing_tables:
+ op.create_table('agent_sessions',
+ sa.Column('id', sa.String(36), primary_key=True),
+ sa.Column('server_id', sa.String(36), sa.ForeignKey('servers.id'), nullable=False, index=True),
+ sa.Column('session_token', sa.String(256)),
+ sa.Column('socket_id', sa.String(100)),
+ sa.Column('connected_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('last_heartbeat', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('ip_address', sa.String(45)),
+ sa.Column('user_agent', sa.String(255)),
+ sa.Column('is_active', sa.Boolean(), server_default=sa.text('1'), index=True),
+ sa.Column('disconnected_at', sa.DateTime()),
+ sa.Column('disconnect_reason', sa.String(100)),
+ )
+
+ if 'security_alerts' not in existing_tables:
+ op.create_table('security_alerts',
+ sa.Column('id', sa.String(36), primary_key=True),
+ sa.Column('server_id', sa.String(36), sa.ForeignKey('servers.id'), nullable=True, index=True),
+ sa.Column('alert_type', sa.String(50), nullable=False, index=True),
+ sa.Column('severity', sa.String(20), nullable=False, server_default='info', index=True),
+ sa.Column('source_ip', sa.String(45)),
+ sa.Column('details', sa.JSON()),
+ sa.Column('status', sa.String(20), server_default='open', index=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), index=True),
+ sa.Column('acknowledged_at', sa.DateTime()),
+ sa.Column('acknowledged_by', sa.Integer(), sa.ForeignKey('users.id')),
+ sa.Column('resolved_at', sa.DateTime()),
+ sa.Column('resolved_by', sa.Integer(), sa.ForeignKey('users.id')),
+ )
+
+ if 'wordpress_sites' not in existing_tables:
+ op.create_table('wordpress_sites',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('application_id', sa.Integer(), sa.ForeignKey('applications.id'), unique=True, nullable=False),
+ sa.Column('wp_version', sa.String(20)),
+ sa.Column('multisite', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('admin_user', sa.String(100)),
+ sa.Column('admin_email', sa.String(200)),
+ sa.Column('db_name', sa.String(100)),
+ sa.Column('db_user', sa.String(100)),
+ sa.Column('db_host', sa.String(200), server_default='localhost'),
+ sa.Column('db_prefix', sa.String(20), server_default='wp_'),
+ sa.Column('git_repo_url', sa.String(500)),
+ sa.Column('git_branch', sa.String(100), server_default='main'),
+ sa.Column('git_paths', sa.Text()),
+ sa.Column('auto_deploy', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('last_deploy_commit', sa.String(40)),
+ sa.Column('last_deploy_at', sa.DateTime()),
+ sa.Column('is_production', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('production_site_id', sa.Integer(), sa.ForeignKey('wordpress_sites.id'), nullable=True),
+ sa.Column('sync_config', sa.Text()),
+ sa.Column('environment_type', sa.String(20), server_default='standalone'),
+ sa.Column('multidev_branch', sa.String(200)),
+ sa.Column('is_locked', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('locked_by', sa.String(100)),
+ sa.Column('locked_reason', sa.String(200)),
+ sa.Column('lock_expires_at', sa.DateTime()),
+ sa.Column('compose_project_name', sa.String(100)),
+ sa.Column('container_prefix', sa.String(100)),
+ sa.Column('resource_limits', sa.Text()),
+ sa.Column('basic_auth_enabled', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('basic_auth_user', sa.String(100)),
+ sa.Column('basic_auth_password_hash', sa.String(200)),
+ sa.Column('health_status', sa.String(20), server_default='unknown'),
+ sa.Column('last_health_check', sa.DateTime()),
+ sa.Column('disk_usage_bytes', sa.BigInteger(), server_default='0'),
+ sa.Column('disk_usage_updated_at', sa.DateTime()),
+ sa.Column('auto_sync_schedule', sa.String(100)),
+ sa.Column('auto_sync_enabled', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'database_snapshots' not in existing_tables:
+ op.create_table('database_snapshots',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('site_id', sa.Integer(), sa.ForeignKey('wordpress_sites.id'), nullable=False),
+ sa.Column('name', sa.String(200), nullable=False),
+ sa.Column('description', sa.Text()),
+ sa.Column('tag', sa.String(100)),
+ sa.Column('file_path', sa.String(500), nullable=False),
+ sa.Column('size_bytes', sa.BigInteger(), server_default='0'),
+ sa.Column('compressed', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('commit_sha', sa.String(40)),
+ sa.Column('commit_message', sa.Text()),
+ sa.Column('tables_included', sa.Text()),
+ sa.Column('row_count', sa.Integer()),
+ sa.Column('status', sa.String(20), server_default='completed'),
+ sa.Column('error_message', sa.Text()),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('expires_at', sa.DateTime()),
+ )
+
+ if 'sync_jobs' not in existing_tables:
+ op.create_table('sync_jobs',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('source_site_id', sa.Integer(), sa.ForeignKey('wordpress_sites.id'), nullable=False),
+ sa.Column('target_site_id', sa.Integer(), sa.ForeignKey('wordpress_sites.id'), nullable=False),
+ sa.Column('name', sa.String(200)),
+ sa.Column('schedule', sa.String(100)),
+ sa.Column('enabled', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('config', sa.Text()),
+ sa.Column('last_run', sa.DateTime()),
+ sa.Column('last_run_status', sa.String(20)),
+ sa.Column('last_run_duration', sa.Integer()),
+ sa.Column('last_run_error', sa.Text()),
+ sa.Column('next_run', sa.DateTime()),
+ sa.Column('run_count', sa.Integer(), server_default='0'),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'environment_activities' not in existing_tables:
+ op.create_table('environment_activities',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('site_id', sa.Integer(), sa.ForeignKey('wordpress_sites.id'), nullable=False),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ sa.Column('action', sa.String(50), nullable=False),
+ sa.Column('description', sa.Text()),
+ sa.Column('metadata', sa.Text()),
+ sa.Column('status', sa.String(20), server_default='completed'),
+ sa.Column('error_message', sa.Text()),
+ sa.Column('duration_seconds', sa.Float()),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'promotion_jobs' not in existing_tables:
+ op.create_table('promotion_jobs',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('source_site_id', sa.Integer(), sa.ForeignKey('wordpress_sites.id'), nullable=False),
+ sa.Column('target_site_id', sa.Integer(), sa.ForeignKey('wordpress_sites.id'), nullable=False),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
+ sa.Column('promotion_type', sa.String(20), nullable=False),
+ sa.Column('config', sa.Text()),
+ sa.Column('status', sa.String(20), server_default='pending'),
+ sa.Column('pre_promotion_snapshot_id', sa.Integer(), sa.ForeignKey('database_snapshots.id'), nullable=True),
+ sa.Column('error_message', sa.Text()),
+ sa.Column('started_at', sa.DateTime()),
+ sa.Column('completed_at', sa.DateTime()),
+ sa.Column('duration_seconds', sa.Float()),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ )
+
+ if 'sanitization_profiles' not in existing_tables:
+ op.create_table('sanitization_profiles',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False),
+ sa.Column('name', sa.String(100), nullable=False),
+ sa.Column('description', sa.Text()),
+ sa.Column('config', sa.Text(), nullable=False),
+ sa.Column('is_default', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('is_builtin', sa.Boolean(), server_default=sa.text('0')),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime()),
+ )
+
+ if 'email_accounts' not in existing_tables:
+ op.create_table('email_accounts',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('email', sa.String(255), unique=True, nullable=False),
+ sa.Column('domain', sa.String(255), nullable=False),
+ sa.Column('username', sa.String(100), nullable=False),
+ sa.Column('quota_mb', sa.Integer(), server_default='1024'),
+ sa.Column('enabled', sa.Boolean(), server_default=sa.text('1')),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('forward_to', sa.Text(), nullable=True),
+ sa.Column('forward_keep_copy', sa.Boolean(), server_default=sa.text('1')),
+ )
+
+ if 'oauth_identities' not in existing_tables:
+ op.create_table('oauth_identities',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False, index=True),
+ sa.Column('provider', sa.String(50), nullable=False),
+ sa.Column('provider_user_id', sa.String(256), nullable=False),
+ sa.Column('provider_email', sa.String(256), nullable=True),
+ sa.Column('provider_display_name', sa.String(256), nullable=True),
+ sa.Column('access_token_encrypted', sa.Text(), nullable=True),
+ sa.Column('refresh_token_encrypted', sa.Text(), nullable=True),
+ sa.Column('token_expires_at', sa.DateTime(), nullable=True),
+ sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now()),
+ sa.Column('last_login_at', sa.DateTime(), nullable=True),
+ sa.UniqueConstraint('provider', 'provider_user_id', name='uq_provider_identity'),
+ )
+
+
+def downgrade():
+ # Drop tables in reverse dependency order
+ tables = [
+ 'oauth_identities', 'email_accounts', 'sanitization_profiles',
+ 'promotion_jobs', 'environment_activities', 'sync_jobs',
+ 'database_snapshots', 'wordpress_sites', 'security_alerts',
+ 'agent_sessions', 'server_commands', 'server_metrics', 'servers',
+ 'server_groups', 'git_deployments', 'webhook_logs', 'git_webhooks',
+ 'workflows', 'metrics_history', 'audit_logs', 'system_settings',
+ 'deployment_diffs', 'deployments', 'notification_preferences',
+ 'environment_variable_history', 'environment_variables', 'domains',
+ 'applications', 'users',
+ ]
+
+ conn = op.get_bind()
+ inspector = sa.inspect(conn)
+ existing_tables = inspector.get_table_names()
+
+ for table in tables:
+ if table in existing_tables:
+ op.drop_table(table)
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 6eba1fa..fddada7 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -5,6 +5,7 @@ Werkzeug==3.1.6
# Database
Flask-SQLAlchemy==3.1.1
SQLAlchemy==2.0.23
+Flask-Migrate==4.0.7
# Authentication
Flask-JWT-Extended==4.6.0
@@ -53,4 +54,8 @@ qrcode[pil]==7.4.2
requests==2.32.5
# S3-compatible storage (AWS S3, Backblaze B2, MinIO, Wasabi)
-boto3==1.35.0
\ No newline at end of file
+boto3==1.35.0
+
+# SSO / OAuth
+Authlib==1.5.2
+python3-saml==1.16.0
\ No newline at end of file
diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index bbddce4..b11023f 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -36,6 +36,8 @@ import WordPressProjects from './pages/WordPressProjects';
import WordPressProject from './pages/WordPressProject';
import SSLCertificates from './pages/SSLCertificates';
import Email from './pages/Email';
+import SSOCallback from './pages/SSOCallback';
+import DatabaseMigration from './pages/DatabaseMigration';
// Page title mapping
const PAGE_TITLES = {
@@ -64,6 +66,7 @@ const PAGE_TITLES = {
'/email': 'Email Server',
'/terminal': 'Terminal',
'/settings': 'Settings',
+ '/migrate': 'Database Migration',
};
function PageTitleUpdater() {
@@ -93,13 +96,17 @@ function PageTitleUpdater() {
}
function PrivateRoute({ children }) {
- const { isAuthenticated, loading, needsSetup } = useAuth();
+ const { isAuthenticated, loading, needsSetup, needsMigration } = useAuth();
if (loading) {
return Loading...
;
}
- // If setup is needed, redirect to setup
+ // Priority: migrations > setup > auth
+ if (needsMigration) {
+ return ;
+ }
+
if (needsSetup) {
return ;
}
@@ -108,13 +115,17 @@ function PrivateRoute({ children }) {
}
function PublicRoute({ children }) {
- const { isAuthenticated, loading, needsSetup } = useAuth();
+ const { isAuthenticated, loading, needsSetup, needsMigration } = useAuth();
if (loading) {
return Loading...
;
}
- // If setup is needed, redirect to setup
+ // Priority: migrations > setup > auth
+ if (needsMigration) {
+ return ;
+ }
+
if (needsSetup) {
return ;
}
@@ -142,6 +153,7 @@ function AppRoutes() {
return (
+ } />
@@ -152,6 +164,11 @@ function AppRoutes() {
} />
+
+
+
+ } />
{registrationEnabled && (
diff --git a/frontend/src/components/SSOProviderIcon.jsx b/frontend/src/components/SSOProviderIcon.jsx
new file mode 100644
index 0000000..7054c95
--- /dev/null
+++ b/frontend/src/components/SSOProviderIcon.jsx
@@ -0,0 +1,27 @@
+import React from 'react';
+import { Key } from 'lucide-react';
+
+const GoogleIcon = () => (
+
+
+
+
+
+
+);
+
+const GitHubIcon = () => (
+
+
+
+);
+
+const SSOProviderIcon = ({ provider }) => {
+ switch (provider) {
+ case 'google': return ;
+ case 'github': return ;
+ default: return ;
+ }
+};
+
+export default SSOProviderIcon;
diff --git a/frontend/src/components/ServerKitLogo.jsx b/frontend/src/components/ServerKitLogo.jsx
new file mode 100644
index 0000000..f91dc0e
--- /dev/null
+++ b/frontend/src/components/ServerKitLogo.jsx
@@ -0,0 +1,27 @@
+const ServerKitLogo = ({ width = 64, height = 64, className = '' }) => (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+);
+
+export default ServerKitLogo;
diff --git a/frontend/src/components/Sidebar.jsx b/frontend/src/components/Sidebar.jsx
index ce6ccef..f56dd00 100644
--- a/frontend/src/components/Sidebar.jsx
+++ b/frontend/src/components/Sidebar.jsx
@@ -2,13 +2,13 @@ import React, { useState, useEffect, useRef } from 'react';
import { NavLink, useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { useTheme } from '../contexts/ThemeContext';
-import { Star, Settings, LogOut, Sun, Moon, Monitor, ChevronRight, ChevronUp } from 'lucide-react';
+import { Star, Settings, LogOut, Sun, Moon, Monitor, ChevronRight, ChevronUp, Layers } from 'lucide-react';
import { api } from '../services/api';
-import ServerKitLogo from '../assets/ServerKitLogo.svg';
+import ServerKitLogo from './ServerKitLogo';
const Sidebar = () => {
const { user, logout } = useAuth();
- const { theme, resolvedTheme, setTheme } = useTheme();
+ const { theme, resolvedTheme, setTheme, whiteLabel } = useTheme();
const navigate = useNavigate();
const [starAnimating, setStarAnimating] = useState(false);
const [menuOpen, setMenuOpen] = useState(false);
@@ -35,6 +35,8 @@ const Sidebar = () => {
}, []);
useEffect(() => {
+ if (whiteLabel.enabled) return;
+
let playCount = 0;
let timeoutId;
@@ -68,32 +70,63 @@ const Sidebar = () => {
clearTimeout(initialDelay);
clearTimeout(timeoutId);
};
- }, []);
+ }, [whiteLabel.enabled]);
return (
-
-
-
+ {whiteLabel.enabled ? (
+
+ {whiteLabel.mode === 'image_full' ? (
+
+ {whiteLabel.logoData ? (
+
+ ) : (
+
+ )}
+
+ ) : whiteLabel.mode === 'text_only' ? (
+
+ {whiteLabel.brandName || 'Brand'}
+
+ ) : (
+ <>
+
+ {whiteLabel.logoData ? (
+
+ ) : (
+
+ )}
+
+
+ {whiteLabel.brandName || 'Brand'}
+
+ >
+ )}
-
ServerKit
-
-
-
-
-
-
-
-
- Star us!
-
-
+ ) : (
+
+ )}
Overview
diff --git a/frontend/src/components/settings/MigrationHistoryTab.jsx b/frontend/src/components/settings/MigrationHistoryTab.jsx
new file mode 100644
index 0000000..79a7a2a
--- /dev/null
+++ b/frontend/src/components/settings/MigrationHistoryTab.jsx
@@ -0,0 +1,103 @@
+import React, { useState, useEffect } from 'react';
+import api from '../../services/api';
+import { Database, Loader, CheckCircle, ArrowUpCircle } from 'lucide-react';
+
+const MigrationHistoryTab = () => {
+ const [revisions, setRevisions] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ useEffect(() => {
+ loadHistory();
+ }, []);
+
+ async function loadHistory() {
+ try {
+ setLoading(true);
+ const data = await api.getMigrationHistory();
+ setRevisions(data.revisions || []);
+ } catch (err) {
+ setError(err.message || 'Failed to load migration history');
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ if (loading) {
+ return (
+
+
+
+ Loading migration history...
+
+
+ );
+ }
+
+ if (error) {
+ return (
+
+ );
+ }
+
+ return (
+
+
+
Database Migrations
+
+ History of all database schema versions applied to this instance.
+
+
+
+ {revisions.length === 0 ? (
+
+
+
No migration history found.
+
+ ) : (
+
+
+
+
+ Revision
+ Description
+ Status
+
+
+
+ {revisions.map((rev, i) => (
+
+
+ {rev.revision.substring(0, 16)}
+
+ {rev.description || 'Schema update'}
+
+ {rev.is_current && (
+
+ Current
+
+ )}
+ {rev.is_head && !rev.is_current && (
+
+ Pending
+
+ )}
+ {!rev.is_current && !rev.is_head && (
+ Applied
+ )}
+
+
+ ))}
+
+
+
+ )}
+
+ );
+};
+
+export default MigrationHistoryTab;
diff --git a/frontend/src/components/settings/SSOConfigTab.jsx b/frontend/src/components/settings/SSOConfigTab.jsx
new file mode 100644
index 0000000..504b701
--- /dev/null
+++ b/frontend/src/components/settings/SSOConfigTab.jsx
@@ -0,0 +1,316 @@
+import React, { useState, useEffect } from 'react';
+import api from '../../services/api';
+import SSOProviderIcon from '../SSOProviderIcon';
+import {
+ Save, RefreshCw, CheckCircle, XCircle, AlertTriangle, Shield, Globe,
+ ChevronDown, ChevronUp
+} from 'lucide-react';
+
+const PROVIDERS = [
+ { id: 'google', name: 'Google', fields: ['client_id', 'client_secret'] },
+ { id: 'github', name: 'GitHub', fields: ['client_id', 'client_secret'] },
+ {
+ id: 'oidc', name: 'OIDC', fields: [
+ 'provider_name', 'client_id', 'client_secret', 'discovery_url'
+ ]
+ },
+ {
+ id: 'saml', name: 'SAML 2.0', fields: [
+ 'entity_id', 'idp_metadata_url', 'idp_sso_url', 'idp_cert'
+ ]
+ },
+];
+
+const FIELD_LABELS = {
+ client_id: 'Client ID',
+ client_secret: 'Client Secret',
+ provider_name: 'Provider Name',
+ discovery_url: 'Discovery URL',
+ entity_id: 'SP Entity ID',
+ idp_metadata_url: 'IdP Metadata URL',
+ idp_sso_url: 'IdP SSO URL',
+ idp_cert: 'IdP Certificate (PEM)',
+};
+
+const SSOConfigTab = () => {
+ const [config, setConfig] = useState({});
+ const [loading, setLoading] = useState(true);
+ const [saving, setSaving] = useState({});
+ const [testing, setTesting] = useState({});
+ const [testResults, setTestResults] = useState({});
+ const [generalSaving, setGeneralSaving] = useState(false);
+ const [message, setMessage] = useState(null);
+ const [expandedProvider, setExpandedProvider] = useState(null);
+
+ // General settings local state
+ const [autoProvision, setAutoProvision] = useState(true);
+ const [defaultRole, setDefaultRole] = useState('developer');
+ const [forceSso, setForceSso] = useState(false);
+ const [allowedDomains, setAllowedDomains] = useState('');
+
+ useEffect(() => {
+ loadConfig();
+ }, []);
+
+ async function loadConfig() {
+ try {
+ const data = await api.getSSOConfig();
+ setConfig(data.config || {});
+ setAutoProvision(data.config?.sso_auto_provision ?? true);
+ setDefaultRole(data.config?.sso_default_role || 'developer');
+ setForceSso(data.config?.sso_force_sso ?? false);
+ const domains = data.config?.sso_allowed_domains || [];
+ setAllowedDomains(Array.isArray(domains) ? domains.join(', ') : '');
+ } catch (err) {
+ setMessage({ type: 'error', text: 'Failed to load SSO config' });
+ } finally {
+ setLoading(false);
+ }
+ }
+
+ function getFieldValue(provider, field) {
+ const key = `sso_${provider}_${field}`;
+ return config[key] ?? '';
+ }
+
+ function setFieldValue(provider, field, value) {
+ const key = `sso_${provider}_${field}`;
+ setConfig(prev => ({ ...prev, [key]: value }));
+ }
+
+ async function handleSaveProvider(providerId) {
+ setSaving(prev => ({ ...prev, [providerId]: true }));
+ setMessage(null);
+ try {
+ const provider = PROVIDERS.find(p => p.id === providerId);
+ const body = {};
+ body.enabled = config[`sso_${providerId}_enabled`] ?? false;
+ for (const field of provider.fields) {
+ body[field] = getFieldValue(providerId, field);
+ }
+ await api.updateSSOProviderConfig(providerId, body);
+ setMessage({ type: 'success', text: `${provider.name} config saved` });
+ } catch (err) {
+ setMessage({ type: 'error', text: err.message });
+ } finally {
+ setSaving(prev => ({ ...prev, [providerId]: false }));
+ }
+ }
+
+ async function handleTestProvider(providerId) {
+ setTesting(prev => ({ ...prev, [providerId]: true }));
+ setTestResults(prev => ({ ...prev, [providerId]: null }));
+ try {
+ const result = await api.testSSOProvider(providerId);
+ setTestResults(prev => ({ ...prev, [providerId]: result }));
+ } catch (err) {
+ setTestResults(prev => ({
+ ...prev,
+ [providerId]: { ok: false, error: err.message }
+ }));
+ } finally {
+ setTesting(prev => ({ ...prev, [providerId]: false }));
+ }
+ }
+
+ async function handleSaveGeneral() {
+ setGeneralSaving(true);
+ setMessage(null);
+ try {
+ const domains = allowedDomains
+ .split(',')
+ .map(d => d.trim().toLowerCase())
+ .filter(Boolean);
+ await api.updateSSOGeneralSettings({
+ sso_auto_provision: autoProvision,
+ sso_default_role: defaultRole,
+ sso_force_sso: forceSso,
+ sso_allowed_domains: domains,
+ });
+ setMessage({ type: 'success', text: 'General SSO settings saved' });
+ } catch (err) {
+ setMessage({ type: 'error', text: err.message });
+ } finally {
+ setGeneralSaving(false);
+ }
+ }
+
+ if (loading) {
+ return
Loading SSO configuration...
;
+ }
+
+ return (
+
+
+
SSO / OAuth Configuration
+
+ Configure external identity providers for single sign-on.
+
+
+
+ {message && (
+
+ {message.type === 'success' ? : }
+ {message.text}
+
+ )}
+
+ {/* General Settings */}
+
+
General Settings
+
+
+
+ setAutoProvision(e.target.checked)}
+ />
+ Auto-provision users
+
+
Automatically create accounts for new SSO users
+
+
+
+ Default role for new SSO users
+ setDefaultRole(e.target.value)}>
+ Viewer
+ Developer
+ Admin
+
+
+
+
+
+ setForceSso(e.target.checked)}
+ />
+ SSO-only mode
+
+
+ {forceSso && (
+
+ Password login will be disabled for all users.
+
+ )}
+ {!forceSso && 'Disable password login and require SSO for all users'}
+
+
+
+
+
Allowed email domains
+
setAllowedDomains(e.target.value)}
+ placeholder="company.com, example.org"
+ />
+
Comma-separated. Leave empty to allow all domains.
+
+
+
+
+ {generalSaving ? 'Saving...' : 'Save General Settings'}
+
+
+
+
+ {/* Provider Cards */}
+ {PROVIDERS.map(provider => {
+ const enabled = config[`sso_${provider.id}_enabled`] ?? false;
+ const isExpanded = expandedProvider === provider.id;
+ const testResult = testResults[provider.id];
+
+ return (
+
+
setExpandedProvider(isExpanded ? null : provider.id)}
+ >
+
+
+
{provider.name}
+ {enabled && (
+
+ Active
+
+ )}
+
+ {isExpanded ?
:
}
+
+
+ {isExpanded && (
+
+
+
+ setFieldValue(provider.id, 'enabled', e.target.checked)}
+ />
+ Enable {provider.name}
+
+
+
+ {provider.fields.map(field => (
+
+ {FIELD_LABELS[field] || field}
+ {field === 'idp_cert' ? (
+
+ ))}
+
+ {testResult && (
+
+ {testResult.ok ? : }
+ {testResult.ok ? testResult.message : testResult.error}
+
+ )}
+
+
+ handleSaveProvider(provider.id)}
+ disabled={saving[provider.id]}
+ >
+
+ {saving[provider.id] ? 'Saving...' : 'Save'}
+
+ handleTestProvider(provider.id)}
+ disabled={testing[provider.id]}
+ >
+
+ {testing[provider.id] ? 'Testing...' : 'Test Connection'}
+
+
+
+ )}
+
+ );
+ })}
+
+ );
+};
+
+export default SSOConfigTab;
diff --git a/frontend/src/contexts/AuthContext.jsx b/frontend/src/contexts/AuthContext.jsx
index 6394d28..96ef9f9 100644
--- a/frontend/src/contexts/AuthContext.jsx
+++ b/frontend/src/contexts/AuthContext.jsx
@@ -9,6 +9,10 @@ export function AuthProvider({ children }) {
const [setupStatus, setSetupStatus] = useState({
needsSetup: false,
registrationEnabled: false,
+ ssoProviders: [],
+ passwordLoginEnabled: true,
+ needsMigration: false,
+ migrationInfo: null,
checked: false
});
@@ -22,6 +26,10 @@ export function AuthProvider({ children }) {
setSetupStatus({
needsSetup: status.needs_setup,
registrationEnabled: status.registration_enabled,
+ ssoProviders: status.sso_providers || [],
+ passwordLoginEnabled: status.password_login_enabled !== false,
+ needsMigration: status.needs_migration || false,
+ migrationInfo: status.migration_info || null,
checked: true
});
@@ -54,6 +62,10 @@ export function AuthProvider({ children }) {
setSetupStatus({
needsSetup: status.needs_setup,
registrationEnabled: status.registration_enabled,
+ ssoProviders: status.sso_providers || [],
+ passwordLoginEnabled: status.password_login_enabled !== false,
+ needsMigration: status.needs_migration || false,
+ migrationInfo: status.migration_info || null,
checked: true
});
} catch (error) {
@@ -75,11 +87,12 @@ export function AuthProvider({ children }) {
async function completeOnboarding(useCases) {
await api.completeOnboarding(useCases);
- setSetupStatus({
+ setSetupStatus(prev => ({
+ ...prev,
needsSetup: false,
registrationEnabled: false,
checked: true
- });
+ }));
}
function logout() {
@@ -116,7 +129,11 @@ export function AuthProvider({ children }) {
isViewer: !!user?.role,
setupStatus,
needsSetup: setupStatus.needsSetup,
+ needsMigration: setupStatus.needsMigration,
+ migrationInfo: setupStatus.migrationInfo,
registrationEnabled: setupStatus.registrationEnabled,
+ ssoProviders: setupStatus.ssoProviders,
+ passwordLoginEnabled: setupStatus.passwordLoginEnabled,
};
return (
diff --git a/frontend/src/contexts/ThemeContext.jsx b/frontend/src/contexts/ThemeContext.jsx
index 40461d2..ba42f33 100644
--- a/frontend/src/contexts/ThemeContext.jsx
+++ b/frontend/src/contexts/ThemeContext.jsx
@@ -4,6 +4,13 @@ const ThemeContext = createContext(null);
const DEFAULT_ACCENT = '#6366f1';
+const DEFAULT_WHITE_LABEL = {
+ enabled: false,
+ mode: 'image_text', // 'image_text' | 'image_full' | 'text_only'
+ brandName: '',
+ logoData: '', // base64 data URL
+};
+
// Get the resolved theme based on current setting and OS preference
function getResolvedTheme(theme) {
if (theme === 'system') {
@@ -60,6 +67,15 @@ export function ThemeProvider({ children }) {
return localStorage.getItem('accent_color') || DEFAULT_ACCENT;
});
+ const [whiteLabel, setWhiteLabelState] = useState(() => {
+ try {
+ const stored = localStorage.getItem('white_label');
+ return stored ? { ...DEFAULT_WHITE_LABEL, ...JSON.parse(stored) } : DEFAULT_WHITE_LABEL;
+ } catch {
+ return DEFAULT_WHITE_LABEL;
+ }
+ });
+
// Update the DOM attribute and resolved theme
const applyTheme = useCallback((newTheme) => {
document.documentElement.setAttribute('data-theme', newTheme);
@@ -80,6 +96,15 @@ export function ThemeProvider({ children }) {
applyAccentToDOM(hex);
}, []);
+ // Public setter for white label config (accepts partial updates)
+ const setWhiteLabel = useCallback((partial) => {
+ setWhiteLabelState(prev => {
+ const next = { ...prev, ...partial };
+ localStorage.setItem('white_label', JSON.stringify(next));
+ return next;
+ });
+ }, []);
+
// Listen for OS theme changes when using 'system' theme
useEffect(() => {
if (theme !== 'system') return;
@@ -106,6 +131,8 @@ export function ThemeProvider({ children }) {
setTheme, // Function to change theme
accentColor, // Current accent hex color
setAccentColor, // Function to change accent color
+ whiteLabel, // White label config object
+ setWhiteLabel, // Function to update white label config
};
return (
diff --git a/frontend/src/pages/DatabaseMigration.jsx b/frontend/src/pages/DatabaseMigration.jsx
new file mode 100644
index 0000000..2712e35
--- /dev/null
+++ b/frontend/src/pages/DatabaseMigration.jsx
@@ -0,0 +1,380 @@
+import React, { useState, useEffect } from 'react';
+import { useNavigate } from 'react-router-dom';
+import { useAuth } from '../contexts/AuthContext';
+import api from '../services/api';
+import {
+ Check, Database, AlertTriangle, ArrowRight, Download,
+ Loader, CheckCircle, XCircle, RotateCcw, Shield
+} from 'lucide-react';
+import ServerKitLogo from '../components/ServerKitLogo';
+
+const TOTAL_STEPS = 4;
+const STEP_TITLES = ['Overview', 'Backup', 'Apply', 'Done'];
+
+const DatabaseMigration = () => {
+ const { isAuthenticated, isAdmin, needsMigration, migrationInfo, refreshSetupStatus, login } = useAuth();
+ const navigate = useNavigate();
+
+ const [currentStep, setCurrentStep] = useState(1);
+ const [backupResult, setBackupResult] = useState(null);
+ const [backupLoading, setBackupLoading] = useState(false);
+ const [applyLoading, setApplyLoading] = useState(false);
+ const [applyError, setApplyError] = useState(null);
+ const [migrationStatus, setMigrationStatus] = useState(null);
+
+ // Login form state (for unauthenticated users)
+ const [loginEmail, setLoginEmail] = useState('');
+ const [loginPassword, setLoginPassword] = useState('');
+ const [loginError, setLoginError] = useState('');
+ const [loginLoading, setLoginLoading] = useState(false);
+
+ useEffect(() => {
+ loadMigrationStatus();
+ }, []);
+
+ // Redirect away if no migration needed
+ useEffect(() => {
+ if (!needsMigration && migrationStatus && !migrationStatus.needs_migration) {
+ navigate('/');
+ }
+ }, [needsMigration, migrationStatus]);
+
+ async function loadMigrationStatus() {
+ try {
+ const status = await api.getMigrationStatus();
+ setMigrationStatus(status);
+ } catch (err) {
+ console.error('Failed to load migration status:', err);
+ }
+ }
+
+ async function handleLogin(e) {
+ e.preventDefault();
+ setLoginError('');
+ setLoginLoading(true);
+ try {
+ await login(loginEmail, loginPassword);
+ } catch (err) {
+ setLoginError(err.message || 'Login failed');
+ } finally {
+ setLoginLoading(false);
+ }
+ }
+
+ async function handleBackup() {
+ setBackupLoading(true);
+ try {
+ const result = await api.createMigrationBackup();
+ setBackupResult(result);
+ } catch (err) {
+ setBackupResult({ success: false, error: err.message });
+ } finally {
+ setBackupLoading(false);
+ }
+ }
+
+ async function handleApply() {
+ setApplyLoading(true);
+ setApplyError(null);
+ try {
+ await api.applyMigrations();
+ setCurrentStep(4);
+ await loadMigrationStatus();
+ } catch (err) {
+ setApplyError(err.message || 'Migration failed');
+ } finally {
+ setApplyLoading(false);
+ }
+ }
+
+ async function handleFinish() {
+ await refreshSetupStatus();
+ navigate('/');
+ }
+
+ function renderProgressBar() {
+ const items = [];
+ for (let i = 1; i <= TOTAL_STEPS; i++) {
+ if (i > 1) {
+ items.push(
+
+ );
+ }
+ let stepClass = 'wizard-progress-step';
+ if (i < currentStep) stepClass += ' completed';
+ else if (i === currentStep) stepClass += ' active';
+
+ items.push(
+
+ {i < currentStep ? : i}
+
+ );
+ }
+ return
{items}
;
+ }
+
+ const status = migrationStatus || migrationInfo || {};
+ const pendingCount = status.pending_count || 0;
+ const pendingMigrations = status.pending_migrations || [];
+
+ return (
+
+
+
+
+
Database Update Required
+
ServerKit needs to update the database before continuing
+
+
+ {renderProgressBar()}
+
+ {/* Step 1: Overview */}
+ {currentStep === 1 && (
+
+
Update Overview
+
+ A new version of ServerKit requires database changes.
+ The panel is paused until these are applied.
+
+
+
+
+ Current version
+
+ {status.current_revision ? status.current_revision.substring(0, 12) : 'none'}
+
+
+
+ Target version
+
+ {status.head_revision ? status.head_revision.substring(0, 12) : 'unknown'}
+
+
+
+ Pending updates
+ {pendingCount}
+
+
+
+ {pendingMigrations.length > 0 && (
+
+
Changes to apply:
+ {pendingMigrations.map((m, i) => (
+
+
+ {m.revision.substring(0, 12)}
+ {m.description || 'Schema update'}
+
+ ))}
+
+ )}
+
+ {!isAuthenticated && (
+
+
Admin Login Required
+
+ Sign in with an admin account to apply the update.
+
+
+
+ )}
+
+ {isAuthenticated && !isAdmin && (
+
+
+
Only admin users can apply database updates. Please sign in with an admin account.
+
+ )}
+
+
+
+
setCurrentStep(2)}
+ disabled={!isAuthenticated || !isAdmin}
+ >
+ Continue
+
+
+
+ )}
+
+ {/* Step 2: Backup */}
+ {currentStep === 2 && (
+
+
Create Backup
+
+ We recommend backing up your database before applying updates.
+
+
+
+
+
+ A backup allows you to restore your database if anything goes wrong
+ during the update process.
+
+
+
+ {!backupResult && (
+
+
+ {backupLoading ? (
+ <> Creating Backup...>
+ ) : (
+ <> Create Backup>
+ )}
+
+
+ )}
+
+ {backupResult && backupResult.success && (
+
+
+
+ Backup created successfully
+ {backupResult.path}
+
+
+ )}
+
+ {backupResult && !backupResult.success && (
+
+
+
+ Backup failed
+ {backupResult.error}
+
+
+ )}
+
+
+
setCurrentStep(1)}>
+ Back
+
+
+ {!backupResult?.success && (
+
setCurrentStep(3)}
+ >
+ Skip backup
+
+ )}
+
setCurrentStep(3)}
+ disabled={!backupResult?.success}
+ >
+ Continue
+
+
+
+
+ )}
+
+ {/* Step 3: Apply */}
+ {currentStep === 3 && (
+
+
Apply Updates
+
+ {applyLoading
+ ? 'Applying database updates. Please do not close this page...'
+ : `Ready to apply ${pendingCount} database update${pendingCount !== 1 ? 's' : ''}.`
+ }
+
+
+ {!applyLoading && !applyError && (
+
+
+ Apply Updates
+
+
+ )}
+
+ {applyLoading && (
+
+
+ Updating database schema...
+
+ )}
+
+ {applyError && (
+
+
+
+ Update failed
+ {applyError}
+
+
+ Retry
+
+
+ )}
+
+ {!applyLoading && (
+
+
setCurrentStep(2)}>
+ Back
+
+
+
+ )}
+
+ )}
+
+ {/* Step 4: Done */}
+ {currentStep === 4 && (
+
+
+
+
Database Updated Successfully
+
+ All migrations have been applied.
+ {migrationStatus?.current_revision && (
+ <> Now at revision {migrationStatus.current_revision.substring(0, 12)}.>
+ )}
+
+
+
+
+
+
+ Continue to ServerKit
+
+
+
+ )}
+
+
+ );
+};
+
+export default DatabaseMigration;
diff --git a/frontend/src/pages/Login.jsx b/frontend/src/pages/Login.jsx
index 58affc3..42e6d7f 100644
--- a/frontend/src/pages/Login.jsx
+++ b/frontend/src/pages/Login.jsx
@@ -1,8 +1,9 @@
import React, { useState, useRef, useEffect } from 'react';
-import { useNavigate, Link } from 'react-router-dom';
+import { useNavigate, Link, useLocation } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import api from '../services/api';
-import ServerKitLogo from '../assets/ServerKitLogo.svg';
+import SSOProviderIcon from '../components/SSOProviderIcon';
+import ServerKitLogo from '../components/ServerKitLogo';
const Login = () => {
const [email, setEmail] = useState('');
@@ -17,12 +18,35 @@ const Login = () => {
const [useBackupCode, setUseBackupCode] = useState(false);
const [backupCode, setBackupCode] = useState('');
- const { login, setUser, setTokens, registrationEnabled } = useAuth();
+ const { login, setUser, setTokens, registrationEnabled, ssoProviders, passwordLoginEnabled } = useAuth();
const navigate = useNavigate();
+ const location = useLocation();
+ const [ssoLoading, setSsoLoading] = useState(null);
// Refs for TOTP input fields
const inputRefs = useRef([]);
+ // Handle incoming 2FA state from SSO callback
+ useEffect(() => {
+ if (location.state?.requires2FA) {
+ setRequires2FA(true);
+ setTempToken(location.state.tempToken);
+ }
+ }, [location.state]);
+
+ async function handleSSOLogin(provider) {
+ setSsoLoading(provider);
+ setError('');
+ try {
+ const redirectUri = `${window.location.origin}/login/callback/${provider}`;
+ const { auth_url } = await api.startSSOAuth(provider, redirectUri);
+ window.location.href = auth_url;
+ } catch (err) {
+ setError(err.message || `Failed to start ${provider} login`);
+ setSsoLoading(null);
+ }
+ }
+
async function handleSubmit(e) {
e.preventDefault();
setError('');
@@ -223,7 +247,7 @@ const Login = () => {
-
+
ServerKit
Sign in to your account
@@ -231,40 +255,64 @@ const Login = () => {
{error &&
{error}
}
-
)}
+
+
);
};
diff --git a/frontend/src/contexts/AuthContext.jsx b/frontend/src/contexts/AuthContext.jsx
index 96ef9f9..460c545 100644
--- a/frontend/src/contexts/AuthContext.jsx
+++ b/frontend/src/contexts/AuthContext.jsx
@@ -79,8 +79,8 @@ export function AuthProvider({ children }) {
return data;
}
- async function register(email, username, password) {
- const data = await api.register(email, username, password);
+ async function register(email, username, password, inviteToken) {
+ const data = await api.register(email, username, password, inviteToken);
setUser(data.user);
return data;
}
@@ -112,6 +112,14 @@ export function AuthProvider({ children }) {
return data.user;
}
+ function hasPermission(feature, level = 'read') {
+ if (!user) return false;
+ if (user.role === 'admin') return true;
+ const perms = user.permissions || {};
+ const featurePerms = perms[feature] || {};
+ return !!featurePerms[level];
+ }
+
const value = {
user,
setUser,
@@ -123,6 +131,7 @@ export function AuthProvider({ children }) {
updateUser,
refreshUser,
refreshSetupStatus,
+ hasPermission,
isAuthenticated: !!user,
isAdmin: user?.role === 'admin',
isDeveloper: user?.role === 'admin' || user?.role === 'developer',
diff --git a/frontend/src/pages/Email.jsx b/frontend/src/pages/Email.jsx
index 2b81d02..d7fb875 100644
--- a/frontend/src/pages/Email.jsx
+++ b/frontend/src/pages/Email.jsx
@@ -1,20 +1,70 @@
-import { useState, useEffect } from 'react';
+import { useState, useEffect, useCallback } from 'react';
import useTabParam from '../hooks/useTabParam';
-import api from '../services/api';
+import { api } from '../services/api';
import { useToast } from '../contexts/ToastContext';
+import Spinner from '../components/Spinner';
+import ConfirmDialog from '../components/ConfirmDialog';
-const VALID_TABS = ['overview', 'accounts', 'postfix', 'dovecot', 'spam', 'authentication', 'queue', 'webmail', 'logs'];
+const VALID_TABS = ['status', 'domains', 'accounts', 'aliases', 'forwarding', 'dns-providers', 'spam', 'webmail', 'queue'];
-const Email = () => {
+function Email() {
const [activeTab, setActiveTab] = useTabParam('/email', VALID_TABS);
const [status, setStatus] = useState(null);
const [loading, setLoading] = useState(true);
+ const [actionLoading, setActionLoading] = useState(false);
+ const [confirmDialog, setConfirmDialog] = useState(null);
- useEffect(() => {
- loadStatus();
- }, []);
+ // Domains
+ const [domains, setDomains] = useState([]);
+ const [showDomainForm, setShowDomainForm] = useState(false);
+ const [newDomain, setNewDomain] = useState({ name: '', dns_provider_id: '', dns_zone_id: '' });
+
+ // Accounts
+ const [selectedDomainId, setSelectedDomainId] = useState('');
+ const [accounts, setAccounts] = useState([]);
+ const [showAccountForm, setShowAccountForm] = useState(false);
+ const [newAccount, setNewAccount] = useState({ username: '', password: '', quota_mb: 1024 });
+ const [showPasswordModal, setShowPasswordModal] = useState(null);
+ const [newPassword, setNewPassword] = useState('');
+
+ // Aliases
+ const [aliases, setAliases] = useState([]);
+ const [showAliasForm, setShowAliasForm] = useState(false);
+ const [newAlias, setNewAlias] = useState({ source: '', destination: '' });
+ const [aliasDomainId, setAliasDomainId] = useState('');
+
+ // Forwarding
+ const [allAccounts, setAllAccounts] = useState([]);
+ const [selectedAccountId, setSelectedAccountId] = useState('');
+ const [forwardingRules, setForwardingRules] = useState([]);
+ const [showForwardForm, setShowForwardForm] = useState(false);
+ const [newForward, setNewForward] = useState({ destination: '', keep_copy: true });
+
+ // DNS Providers
+ const [providers, setProviders] = useState([]);
+ const [showProviderForm, setShowProviderForm] = useState(false);
+ const [newProvider, setNewProvider] = useState({ name: '', provider: 'cloudflare', api_key: '', api_secret: '', api_email: '', is_default: false });
+ const [providerZones, setProviderZones] = useState({});
+
+ // Spam
+ const [spamConfig, setSpamConfig] = useState(null);
+
+ // Webmail
+ const [webmailStatus, setWebmailStatus] = useState(null);
+ const [proxyDomain, setProxyDomain] = useState('');
+ const [installHostname, setInstallHostname] = useState('');
+
+ // Queue & Logs
+ const [queue, setQueue] = useState([]);
+ const [logs, setLogs] = useState([]);
+ const [logLines, setLogLines] = useState(100);
+
+ const toast = useToast();
- async function loadStatus() {
+ useEffect(() => { loadStatus(); }, []);
+
+ const loadStatus = async () => {
+ setLoading(true);
try {
const data = await api.getEmailStatus();
setStatus(data);
@@ -23,1142 +73,880 @@ const Email = () => {
} finally {
setLoading(false);
}
- }
-
- if (loading) {
- return
Loading email server status...
;
- }
-
- return (
-
-
-
-
Email Server
-
Manage Postfix, Dovecot, spam filtering, and email authentication
-
-
-
-
- setActiveTab('overview')}>
- Overview
-
- setActiveTab('accounts')}>
- Accounts
-
- setActiveTab('postfix')}>
- Postfix (SMTP)
-
- setActiveTab('dovecot')}>
- Dovecot (IMAP)
-
- setActiveTab('spam')}>
- Spam Filter
-
- setActiveTab('authentication')}>
- DKIM/SPF/DMARC
-
- setActiveTab('queue')}>
- Mail Queue
-
- setActiveTab('webmail')}>
- Webmail
-
- setActiveTab('logs')}>
- Logs
-
-
-
-
- {activeTab === 'overview' &&
}
- {activeTab === 'accounts' &&
}
- {activeTab === 'postfix' &&
}
- {activeTab === 'dovecot' &&
}
- {activeTab === 'spam' &&
}
- {activeTab === 'authentication' &&
}
- {activeTab === 'queue' &&
}
- {activeTab === 'webmail' &&
}
- {activeTab === 'logs' &&
}
-
-
- );
-};
+ };
+ const loadDomains = useCallback(async () => {
+ try {
+ const data = await api.getEmailDomains();
+ setDomains(data.domains || []);
+ } catch (err) { toast.error('Failed to load domains'); }
+ }, []);
-// ==========================================
-// OVERVIEW TAB
-// ==========================================
+ useEffect(() => { if (activeTab === 'domains') loadDomains(); }, [activeTab]);
-const OverviewTab = ({ status, onRefresh }) => {
- const { showToast } = useToast();
+ const loadAccounts = useCallback(async (domainId) => {
+ if (!domainId) return;
+ try {
+ const data = await api.getEmailAccounts(domainId);
+ setAccounts(data.accounts || []);
+ } catch (err) { toast.error('Failed to load accounts'); }
+ }, []);
- const services = [
- { key: 'postfix', label: 'Postfix', desc: 'SMTP mail transfer agent', data: status?.postfix },
- { key: 'dovecot', label: 'Dovecot', desc: 'IMAP/POP3 server', data: status?.dovecot },
- { key: 'spamassassin', label: 'SpamAssassin', desc: 'Spam filtering engine', data: status?.spamassassin },
- { key: 'opendkim', label: 'OpenDKIM', desc: 'DKIM email signing', data: status?.opendkim },
- ];
+ useEffect(() => { if (activeTab === 'accounts' && selectedDomainId) loadAccounts(selectedDomainId); }, [activeTab, selectedDomainId]);
+ useEffect(() => {
+ if (activeTab === 'accounts' && domains.length === 0) loadDomains();
+ }, [activeTab]);
- async function handleServiceAction(service, action) {
+ const loadAliases = useCallback(async (domainId) => {
+ if (!domainId) return;
try {
- if (action === 'start') await api.startEmailService(service);
- else if (action === 'stop') await api.stopEmailService(service);
- else if (action === 'restart') await api.restartEmailService(service);
- showToast(`${service} ${action}ed successfully`, 'success');
- onRefresh();
- } catch (err) {
- showToast(err.message || `Failed to ${action} ${service}`, 'error');
- }
- }
+ const data = await api.getEmailAliases(domainId);
+ setAliases(data.aliases || []);
+ } catch (err) { toast.error('Failed to load aliases'); }
+ }, []);
- return (
-
-
- {services.map(svc => (
-
-
-
-
-
{svc.label}
-
{svc.desc}
-
-
- {svc.data?.running ? 'Running' : svc.data?.installed ? 'Stopped' : 'Not Installed'}
-
-
+ useEffect(() => { if (activeTab === 'aliases' && aliasDomainId) loadAliases(aliasDomainId); }, [activeTab, aliasDomainId]);
+ useEffect(() => {
+ if (activeTab === 'aliases' && domains.length === 0) loadDomains();
+ }, [activeTab]);
- {svc.data?.version && (
-
- Version: {svc.data.version}
-
- )}
+ const loadForwarding = useCallback(async (accountId) => {
+ if (!accountId) return;
+ try {
+ const data = await api.getEmailForwarding(accountId);
+ setForwardingRules(data.rules || []);
+ } catch (err) { toast.error('Failed to load forwarding rules'); }
+ }, []);
- {svc.data?.installed && (
-
- {svc.data.running ? (
- <>
- handleServiceAction(svc.key, 'restart')}>
- Restart
-
- handleServiceAction(svc.key, 'stop')}>
- Stop
-
- >
- ) : (
- handleServiceAction(svc.key, 'start')}>
- Start
-
- )}
-
- )}
-
-
- ))}
-
-
- );
-};
+ useEffect(() => {
+ if (activeTab === 'forwarding') {
+ if (domains.length === 0) loadDomains();
+ // Load all accounts from all domains
+ const loadAll = async () => {
+ try {
+ const d = await api.getEmailDomains();
+ const all = [];
+ for (const dom of (d.domains || [])) {
+ const accts = await api.getEmailAccounts(dom.id);
+ all.push(...(accts.accounts || []).map(a => ({ ...a, domain_name: dom.name })));
+ }
+ setAllAccounts(all);
+ } catch (err) { console.error(err); }
+ };
+ loadAll();
+ }
+ }, [activeTab]);
+ useEffect(() => { if (selectedAccountId) loadForwarding(selectedAccountId); }, [selectedAccountId]);
-// ==========================================
-// ACCOUNTS TAB
-// ==========================================
+ useEffect(() => {
+ if (activeTab === 'dns-providers') {
+ api.getEmailDNSProviders().then(d => setProviders(d.providers || [])).catch(() => {});
+ }
+ }, [activeTab]);
-const AccountsTab = () => {
- const { showToast } = useToast();
- const [accounts, setAccounts] = useState([]);
- const [loading, setLoading] = useState(true);
- const [showCreate, setShowCreate] = useState(false);
- const [editingForward, setEditingForward] = useState(null);
- const [formData, setFormData] = useState({ email: '', password: '', domain: '', quota_mb: 1024 });
- const [forwardData, setForwardData] = useState({ forward_to: '', keep_copy: true });
+ useEffect(() => {
+ if (activeTab === 'spam') {
+ api.getSpamConfig().then(d => setSpamConfig(d.config || null)).catch(() => {});
+ }
+ }, [activeTab]);
useEffect(() => {
- loadAccounts();
- }, []);
+ if (activeTab === 'webmail') {
+ api.getWebmailStatus().then(d => setWebmailStatus(d)).catch(() => {});
+ }
+ }, [activeTab]);
- async function loadAccounts() {
- try {
- const data = await api.getEmailAccounts();
- setAccounts(data.accounts || []);
- } catch (err) {
- showToast('Failed to load accounts', 'error');
- } finally {
- setLoading(false);
+ useEffect(() => {
+ if (activeTab === 'queue') {
+ api.getMailQueue().then(d => setQueue(d.queue || [])).catch(() => {});
+ api.getMailLogs(logLines).then(d => setLogs(d.logs || [])).catch(() => {});
}
- }
+ }, [activeTab, logLines]);
+
+ // ── Actions ──
- async function handleCreate(e) {
+ const handleInstall = async () => {
+ setActionLoading(true);
+ try {
+ await api.installEmailServer({ hostname: installHostname || undefined });
+ toast.success('Email server installed');
+ loadStatus();
+ } catch (err) { toast.error(err.message || 'Installation failed'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleServiceControl = async (component, action) => {
+ setActionLoading(true);
+ try {
+ await api.controlEmailService(component, action);
+ toast.success(`${component} ${action} successful`);
+ loadStatus();
+ } catch (err) { toast.error(err.message || `Failed to ${action} ${component}`); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleAddDomain = async (e) => {
e.preventDefault();
+ setActionLoading(true);
try {
- const result = await api.createEmailAccount(formData);
- if (result.success) {
- showToast('Account created', 'success');
- setShowCreate(false);
- setFormData({ email: '', password: '', domain: '', quota_mb: 1024 });
- loadAccounts();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
-
- async function handleDelete(accountId) {
- if (!confirm('Delete this email account? This cannot be undone.')) return;
+ await api.addEmailDomain(newDomain);
+ toast.success('Domain added');
+ setShowDomainForm(false);
+ setNewDomain({ name: '', dns_provider_id: '', dns_zone_id: '' });
+ loadDomains();
+ } catch (err) { toast.error(err.message || 'Failed to add domain'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleDeleteDomain = (domainId, name) => {
+ setConfirmDialog({
+ message: `Delete domain "${name}" and all its accounts and aliases?`,
+ onConfirm: async () => {
+ try {
+ await api.deleteEmailDomain(domainId);
+ toast.success('Domain deleted');
+ loadDomains();
+ } catch (err) { toast.error('Failed to delete domain'); }
+ setConfirmDialog(null);
+ },
+ onCancel: () => setConfirmDialog(null),
+ });
+ };
+
+ const handleVerifyDNS = async (domainId) => {
+ setActionLoading(true);
try {
- const result = await api.deleteEmailAccount(accountId);
- if (result.success) {
- showToast('Account deleted', 'success');
- loadAccounts();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
-
- async function handleToggle(account) {
+ const result = await api.verifyEmailDNS(domainId);
+ if (result.all_verified) toast.success('All DNS records verified');
+ else toast.error('Some DNS records are missing');
+ loadDomains();
+ } catch (err) { toast.error('DNS verification failed'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleDeployDNS = async (domainId) => {
+ setActionLoading(true);
try {
- await api.updateEmailAccount(account.id, { enabled: !account.enabled });
- showToast(`Account ${account.enabled ? 'disabled' : 'enabled'}`, 'success');
- loadAccounts();
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
+ await api.deployEmailDNS(domainId);
+ toast.success('DNS records deployed');
+ } catch (err) { toast.error(err.message || 'DNS deployment failed'); }
+ finally { setActionLoading(false); }
+ };
- async function handleForwarding(e) {
+ const handleCreateAccount = async (e) => {
e.preventDefault();
+ setActionLoading(true);
try {
- const result = await api.setEmailForwarding(editingForward, forwardData);
- if (result.success) {
- showToast('Forwarding updated', 'success');
- setEditingForward(null);
- loadAccounts();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
-
- if (loading) return
Loading accounts...
;
-
- return (
-
-
-
Email Accounts
- setShowCreate(!showCreate)}>
- {showCreate ? 'Cancel' : 'Create Account'}
-
-
-
- {showCreate && (
-
-
-
- )}
-
- {accounts.length === 0 ? (
-
-
-
No email accounts configured yet.
-
-
- ) : (
-
-
-
-
- Email
- Domain
- Quota
- Forwarding
- Status
- Actions
-
-
-
- {accounts.map(account => (
-
- {account.email}
- {account.domain}
- {account.quota_mb} MB
- {account.forward_to || 'None'}
-
-
- {account.enabled ? 'Active' : 'Disabled'}
-
-
-
-
- handleToggle(account)}
- >
- {account.enabled ? 'Disable' : 'Enable'}
-
- {
- setEditingForward(account.id);
- setForwardData({
- forward_to: account.forward_to || '',
- keep_copy: account.forward_keep_copy !== false,
- });
- }}
- >
- Forward
-
- handleDelete(account.id)}
- >
- Delete
-
-
-
-
- ))}
-
-
-
- )}
-
- {editingForward && (
-
setEditingForward(null)}>
-
e.stopPropagation()}>
-
-
Email Forwarding
- setEditingForward(null)}>×
-
-
-
-
- setEditingForward(null)}>Cancel
- Save
-
-
-
-
- )}
-
- );
-};
-
-
-// ==========================================
-// POSTFIX TAB
-// ==========================================
-
-const PostfixTab = ({ status, onRefresh }) => {
- const { showToast } = useToast();
- const [config, setConfig] = useState(null);
- const [loading, setLoading] = useState(true);
- const [installing, setInstalling] = useState(false);
-
- useEffect(() => {
- if (status?.postfix?.installed) loadConfig();
- else setLoading(false);
- }, [status]);
-
- async function loadConfig() {
+ await api.createEmailAccount(selectedDomainId, newAccount);
+ toast.success('Account created');
+ setShowAccountForm(false);
+ setNewAccount({ username: '', password: '', quota_mb: 1024 });
+ loadAccounts(selectedDomainId);
+ } catch (err) { toast.error(err.message || 'Failed to create account'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleDeleteAccount = (accountId, email) => {
+ setConfirmDialog({
+ message: `Delete account "${email}"? This will remove the mailbox.`,
+ onConfirm: async () => {
+ try {
+ await api.deleteEmailAccount(accountId);
+ toast.success('Account deleted');
+ loadAccounts(selectedDomainId);
+ } catch (err) { toast.error('Failed to delete account'); }
+ setConfirmDialog(null);
+ },
+ onCancel: () => setConfirmDialog(null),
+ });
+ };
+
+ const handleChangePassword = async () => {
+ if (!showPasswordModal || !newPassword) return;
+ setActionLoading(true);
try {
- const data = await api.getPostfixConfig();
- setConfig(data.config || {});
- } catch (err) {
- console.error('Failed to load Postfix config:', err);
- } finally {
- setLoading(false);
- }
- }
-
- async function handleInstall() {
- setInstalling(true);
+ await api.changeEmailPassword(showPasswordModal, newPassword);
+ toast.success('Password changed');
+ setShowPasswordModal(null);
+ setNewPassword('');
+ } catch (err) { toast.error('Failed to change password'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleCreateAlias = async (e) => {
+ e.preventDefault();
+ setActionLoading(true);
try {
- const result = await api.installPostfix();
- if (result.success) {
- showToast('Postfix installed', 'success');
- onRefresh();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- } finally {
- setInstalling(false);
- }
- }
-
- async function handleSave() {
+ await api.createEmailAlias(aliasDomainId, newAlias);
+ toast.success('Alias created');
+ setShowAliasForm(false);
+ setNewAlias({ source: '', destination: '' });
+ loadAliases(aliasDomainId);
+ } catch (err) { toast.error(err.message || 'Failed to create alias'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleDeleteAlias = (aliasId) => {
+ setConfirmDialog({
+ message: 'Delete this alias?',
+ onConfirm: async () => {
+ try {
+ await api.deleteEmailAlias(aliasId);
+ toast.success('Alias deleted');
+ loadAliases(aliasDomainId);
+ } catch (err) { toast.error('Failed to delete alias'); }
+ setConfirmDialog(null);
+ },
+ onCancel: () => setConfirmDialog(null),
+ });
+ };
+
+ const handleCreateForwarding = async (e) => {
+ e.preventDefault();
+ setActionLoading(true);
try {
- const result = await api.updatePostfixConfig(config);
- if (result.success) {
- showToast('Configuration saved', 'success');
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
-
- if (loading) return
Loading...
;
-
- if (!status?.postfix?.installed) {
- return (
-
-
-
Postfix Not Installed
-
Postfix is a high-performance mail transfer agent (MTA) used for sending and receiving email.
-
- {installing ? 'Installing...' : 'Install Postfix'}
-
-
-
- );
- }
-
- return (
-
-
-
-
Postfix Configuration
-
-
- {config && (
-
- {Object.entries(config).map(([key, value]) => (
-
- {key}
- setConfig({ ...config, [key]: e.target.value })}
- />
-
- ))}
-
- )}
-
- Save Configuration
-
-
-
-
- );
-};
-
+ await api.createEmailForwarding(selectedAccountId, newForward);
+ toast.success('Forwarding rule created');
+ setShowForwardForm(false);
+ setNewForward({ destination: '', keep_copy: true });
+ loadForwarding(selectedAccountId);
+ } catch (err) { toast.error(err.message || 'Failed to create forwarding rule'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleDeleteForwarding = (ruleId) => {
+ setConfirmDialog({
+ message: 'Delete this forwarding rule?',
+ onConfirm: async () => {
+ try {
+ await api.deleteEmailForwarding(ruleId);
+ toast.success('Rule deleted');
+ loadForwarding(selectedAccountId);
+ } catch (err) { toast.error('Failed to delete rule'); }
+ setConfirmDialog(null);
+ },
+ onCancel: () => setConfirmDialog(null),
+ });
+ };
+
+ const handleAddProvider = async (e) => {
+ e.preventDefault();
+ setActionLoading(true);
+ try {
+ await api.addEmailDNSProvider(newProvider);
+ toast.success('DNS provider added');
+ setShowProviderForm(false);
+ setNewProvider({ name: '', provider: 'cloudflare', api_key: '', api_secret: '', api_email: '', is_default: false });
+ const d = await api.getEmailDNSProviders();
+ setProviders(d.providers || []);
+ } catch (err) { toast.error(err.message || 'Failed to add provider'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleDeleteProvider = (providerId) => {
+ setConfirmDialog({
+ message: 'Delete this DNS provider?',
+ onConfirm: async () => {
+ try {
+ await api.deleteEmailDNSProvider(providerId);
+ toast.success('Provider deleted');
+ const d = await api.getEmailDNSProviders();
+ setProviders(d.providers || []);
+ } catch (err) { toast.error('Failed to delete provider'); }
+ setConfirmDialog(null);
+ },
+ onCancel: () => setConfirmDialog(null),
+ });
+ };
+
+ const handleTestProvider = async (providerId) => {
+ setActionLoading(true);
+ try {
+ const result = await api.testEmailDNSProvider(providerId);
+ if (result.success) toast.success('Connection successful');
+ else toast.error(result.error || 'Connection failed');
+ } catch (err) { toast.error('Test failed'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleListZones = async (providerId) => {
+ try {
+ const result = await api.getEmailDNSZones(providerId);
+ setProviderZones(prev => ({ ...prev, [providerId]: result.zones || [] }));
+ } catch (err) { toast.error('Failed to list zones'); }
+ };
-// ==========================================
-// DOVECOT TAB
-// ==========================================
+ const handleUpdateSpam = async () => {
+ setActionLoading(true);
+ try {
+ await api.updateSpamConfig(spamConfig);
+ toast.success('SpamAssassin config updated');
+ } catch (err) { toast.error('Failed to update config'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleUpdateSpamRules = async () => {
+ setActionLoading(true);
+ try {
+ const result = await api.updateSpamRules();
+ toast.success(result.message || 'Rules updated');
+ } catch (err) { toast.error('Failed to update rules'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleWebmailInstall = async () => {
+ setActionLoading(true);
+ try {
+ await api.installWebmail({});
+ toast.success('Roundcube installed');
+ const d = await api.getWebmailStatus();
+ setWebmailStatus(d);
+ } catch (err) { toast.error('Installation failed'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleWebmailControl = async (action) => {
+ setActionLoading(true);
+ try {
+ await api.controlWebmail(action);
+ toast.success(`Roundcube ${action} successful`);
+ const d = await api.getWebmailStatus();
+ setWebmailStatus(d);
+ } catch (err) { toast.error(`Failed to ${action}`); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleConfigureProxy = async () => {
+ if (!proxyDomain) return;
+ setActionLoading(true);
+ try {
+ await api.configureWebmailProxy(proxyDomain);
+ toast.success('Proxy configured');
+ } catch (err) { toast.error('Failed to configure proxy'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleFlushQueue = async () => {
+ setActionLoading(true);
+ try {
+ await api.flushMailQueue();
+ toast.success('Queue flushed');
+ const d = await api.getMailQueue();
+ setQueue(d.queue || []);
+ } catch (err) { toast.error('Failed to flush queue'); }
+ finally { setActionLoading(false); }
+ };
+
+ const handleDeleteQueueItem = async (queueId) => {
+ try {
+ await api.deleteMailQueueItem(queueId);
+ toast.success('Message deleted');
+ const d = await api.getMailQueue();
+ setQueue(d.queue || []);
+ } catch (err) { toast.error('Failed to delete message'); }
+ };
-const DovecotTab = ({ status, onRefresh }) => {
- const { showToast } = useToast();
- const [config, setConfig] = useState(null);
- const [loading, setLoading] = useState(true);
- const [installing, setInstalling] = useState(false);
+ // ── Render ──
- useEffect(() => {
- if (status?.dovecot?.installed) loadConfig();
- else setLoading(false);
- }, [status]);
+ if (loading) return
;
- async function loadConfig() {
- try {
- const data = await api.getDovecotConfig();
- setConfig(data.config || {});
- } catch (err) {
- console.error('Failed to load Dovecot config:', err);
- } finally {
- setLoading(false);
- }
- }
+ const isInstalled = status?.installed;
- async function handleInstall() {
- setInstalling(true);
- try {
- const result = await api.installDovecot();
- if (result.success) {
- showToast('Dovecot installed', 'success');
- onRefresh();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- } finally {
- setInstalling(false);
- }
- }
-
- if (loading) return
Loading...
;
-
- if (!status?.dovecot?.installed) {
- return (
-
-
-
Dovecot Not Installed
-
Dovecot is an IMAP and POP3 server that allows email clients to access mailboxes.
-
- {installing ? 'Installing...' : 'Install Dovecot'}
-
+ const ServiceCard = ({ name, data, component }) => (
+
+
+
+
{name}
+ {data?.version && v{data.version} }
+
+ {data?.running ? 'Running' : data?.installed ? 'Stopped' : 'Not Installed'}
+
- );
- }
-
- return (
-
-
-
-
Dovecot Configuration
-
- {status?.dovecot?.running ? 'Running' : 'Stopped'}
-
+ {data?.installed && (
+
+ handleServiceControl(component, 'restart')} disabled={actionLoading}>Restart
+ {data?.running
+ ? handleServiceControl(component, 'stop')} disabled={actionLoading}>Stop
+ : handleServiceControl(component, 'start')} disabled={actionLoading}>Start
+ }
-
- {config && Object.keys(config).length > 0 ? (
-
- {Object.entries(config).map(([key, value]) => (
-
- {key}
- {value}
-
- ))}
-
- ) : (
-
Using default Dovecot configuration.
- )}
-
-
+ )}
);
-};
-
-
-// ==========================================
-// SPAM FILTER TAB
-// ==========================================
-
-const SpamTab = ({ status, onRefresh }) => {
- const { showToast } = useToast();
- const [config, setConfig] = useState(null);
- const [loading, setLoading] = useState(true);
- const [installing, setInstalling] = useState(false);
-
- useEffect(() => {
- if (status?.spamassassin?.installed) loadConfig();
- else setLoading(false);
- }, [status]);
-
- async function loadConfig() {
- try {
- const data = await api.getSpamAssassinConfig();
- setConfig(data.config || {});
- } catch (err) {
- console.error('Failed to load SpamAssassin config:', err);
- } finally {
- setLoading(false);
- }
- }
-
- async function handleInstall() {
- setInstalling(true);
- try {
- const result = await api.installSpamAssassin();
- if (result.success) {
- showToast('SpamAssassin installed', 'success');
- onRefresh();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- } finally {
- setInstalling(false);
- }
- }
-
- async function handleSave() {
- try {
- const result = await api.updateSpamAssassinConfig(config);
- if (result.success) {
- showToast('SpamAssassin configuration saved', 'success');
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
-
- if (loading) return
Loading...
;
-
- if (!status?.spamassassin?.installed) {
- return (
-
-
-
SpamAssassin Not Installed
-
SpamAssassin is a mail filter that identifies spam using content analysis and DNS blocklists.
-
- {installing ? 'Installing...' : 'Install SpamAssassin'}
-
-
-
- );
- }
return (
-
-
-
-
SpamAssassin Configuration
+
+
+
+
Email Server
+
Manage Postfix, Dovecot, DKIM, SpamAssassin, and Roundcube
-
- {config && (
-
- )}
-
- Save Configuration
-
+
+ Refresh
-
- );
-};
-
-
-// ==========================================
-// AUTHENTICATION TAB (DKIM/SPF/DMARC)
-// ==========================================
-
-const AuthenticationTab = ({ status, onRefresh }) => {
- const { showToast } = useToast();
- const [domain, setDomain] = useState('');
- const [selector, setSelector] = useState('mail');
- const [dnsRecords, setDnsRecords] = useState(null);
- const [installing, setInstalling] = useState(false);
- const [generating, setGenerating] = useState(false);
-
- async function handleInstallDkim() {
- setInstalling(true);
- try {
- const result = await api.installDkim();
- if (result.success) {
- showToast('OpenDKIM installed', 'success');
- onRefresh();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- } finally {
- setInstalling(false);
- }
- }
-
- async function handleGenerateKey() {
- if (!domain) {
- showToast('Please enter a domain', 'error');
- return;
- }
- setGenerating(true);
- try {
- const result = await api.generateDkimKey({ domain, selector });
- if (result.success) {
- showToast('DKIM key generated', 'success');
- loadDnsRecords();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- } finally {
- setGenerating(false);
- }
- }
- async function loadDnsRecords() {
- if (!domain) return;
- try {
- const data = await api.getEmailDnsRecords(domain);
- setDnsRecords(data.records || []);
- } catch (err) {
- showToast('Failed to load DNS records', 'error');
- }
- }
-
- return (
-
- {!status?.opendkim?.installed && (
-
-
-
OpenDKIM Not Installed
-
DKIM signs outgoing emails to prove they were sent from your server and have not been tampered with.
-
- {installing ? 'Installing...' : 'Install OpenDKIM'}
+ {!isInstalled ? (
+
+
✉
+
Email Server Not Installed
+
Install Postfix, Dovecot, OpenDKIM, and SpamAssassin to enable email hosting.
+
+
+ Hostname (e.g. mail.example.com)
+ setInstallHostname(e.target.value)} placeholder="mail.example.com" />
+
+
+ {actionLoading ? 'Installing...' : 'Install Email Server'}
- )}
+ ) : (
+ <>
+
+ {VALID_TABS.map(tab => (
+ setActiveTab(tab)}>
+ {tab === 'dns-providers' ? 'DNS Providers' : tab.charAt(0).toUpperCase() + tab.slice(1)}
+
+ ))}
+
-
-
-
Generate DKIM Key
-
-
-
-
-
Domain
-
setDomain(e.target.value)}
- placeholder="example.com"
- />
+ {/* Status Tab */}
+ {activeTab === 'status' && (
+
-
-
Selector
-
setSelector(e.target.value)}
- placeholder="mail"
- />
+ )}
+
+ {/* Domains Tab */}
+ {activeTab === 'domains' && (
+
+
+
Email Domains
+ setShowDomainForm(!showDomainForm)}>
+ {showDomainForm ? 'Cancel' : 'Add Domain'}
+
+
+ {showDomainForm && (
+
+
+
+ Domain Name
+ setNewDomain({ ...newDomain, name: e.target.value })} placeholder="example.com" required />
+
+
+
+ Add Domain
+
+
+ )}
+
+ {domains.length === 0 ? (
+
+ ) : domains.map(d => (
+
+
+
{d.name}
+ {d.is_active ? 'Active' : 'Inactive'}
+
+
+ {d.accounts_count} accounts
+ {d.aliases_count} aliases
+
+
+ DKIM
+ SPF
+ DMARC
+
+
+ handleVerifyDNS(d.id)} disabled={actionLoading}>Verify DNS
+ {d.dns_provider_id && handleDeployDNS(d.id)} disabled={actionLoading}>Deploy DNS }
+ handleDeleteDomain(d.id, d.name)}>Delete
+
+
+ ))}
+
-
-
-
- {generating ? 'Generating...' : 'Generate Key'}
-
-
- Show DNS Records
-
-
-
-
+ )}
- {dnsRecords && dnsRecords.length > 0 && (
-
-
-
Required DNS Records
-
-
-
- Add these records to your DNS settings for email authentication.
-
-
- {dnsRecords.map((record, i) => (
-
-
-
{record.type}
-
{record.purpose}
+ {/* Accounts Tab */}
+ {activeTab === 'accounts' && (
+
+
+
+ Select Domain
+ setSelectedDomainId(e.target.value)}>
+ -- Select --
+ {domains.map(d => {d.name} )}
+
+
+
+ {selectedDomainId && (
+ <>
+
+
Accounts
+ setShowAccountForm(!showAccountForm)}>
+ {showAccountForm ? 'Cancel' : 'Create Account'}
+
+
+ {showAccountForm && (
+
+
+
+ Create
+
+
+ )}
+
+ {accounts.length === 0 ? (
+
No accounts for this domain
+ ) : accounts.map(a => (
+
+
+
{a.email}
+
+ Quota: {a.quota_mb}MB
+ {a.is_active ? 'Active' : 'Disabled'}
+
+
+
+ { setShowPasswordModal(a.id); setNewPassword(''); }}>Password
+ handleDeleteAccount(a.id, a.email)}>Delete
+
+
+ ))}
-
-
-
Name:
-
{record.name}
+ >
+ )}
+ {showPasswordModal && (
+
setShowPasswordModal(null)}>
+
e.stopPropagation()}>
+
Change Password
+
+ New Password
+ setNewPassword(e.target.value)} />
-
-
Value:
-
{record.value}
+
+ setShowPasswordModal(null)}>Cancel
+ Change
- ))}
+ )}
-
-
- )}
-
- );
-};
-
-
-// ==========================================
-// MAIL QUEUE TAB
-// ==========================================
-
-const QueueTab = () => {
- const { showToast } = useToast();
- const [queue, setQueue] = useState([]);
- const [loading, setLoading] = useState(true);
- const [count, setCount] = useState(0);
-
- useEffect(() => {
- loadQueue();
- }, []);
-
- async function loadQueue() {
- setLoading(true);
- try {
- const data = await api.getMailQueue();
- setQueue(data.queue || []);
- setCount(data.count || 0);
- } catch (err) {
- showToast('Failed to load mail queue', 'error');
- } finally {
- setLoading(false);
- }
- }
-
- async function handleFlush() {
- try {
- await api.flushMailQueue();
- showToast('Queue flushed', 'success');
- loadQueue();
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
-
- async function handleDelete(queueId) {
- try {
- await api.deleteQueuedMessage(queueId);
- showToast('Message deleted', 'success');
- loadQueue();
- } catch (err) {
- showToast(err.message, 'error');
- }
- }
-
- if (loading) return
Loading mail queue...
;
-
- return (
-
-
-
Mail Queue ({count} messages)
-
- Refresh
-
- Flush Queue
-
-
-
+ )}
- {queue.length === 0 ? (
-
- ) : (
-
-
-
-
- Queue ID
- Size
- Date
- Sender
- Recipients
- Actions
-
-
-
- {queue.map(item => (
-
- {item.id}
- {item.size} B
- {item.date}
- {item.sender}
- {(item.recipients || []).join(', ')}
-
- handleDelete(item.id)}
- >
- Delete
+ {/* Aliases Tab */}
+ {activeTab === 'aliases' && (
+
+
+
+ Select Domain
+ setAliasDomainId(e.target.value)}>
+ -- Select --
+ {domains.map(d => {d.name} )}
+
+
+
+ {aliasDomainId && (
+ <>
+
+
Aliases
+ setShowAliasForm(!showAliasForm)}>
+ {showAliasForm ? 'Cancel' : 'Create Alias'}
-
-
- ))}
-
-
-
- )}
-
- );
-};
-
-
-// ==========================================
-// WEBMAIL TAB
-// ==========================================
-
-const WebmailTab = () => {
- const { showToast } = useToast();
- const [status, setStatus] = useState(null);
- const [loading, setLoading] = useState(true);
- const [installing, setInstalling] = useState(false);
-
- useEffect(() => {
- loadStatus();
- }, []);
-
- async function loadStatus() {
- try {
- const data = await api.getWebmailStatus();
- setStatus(data);
- } catch (err) {
- console.error('Failed to load webmail status:', err);
- } finally {
- setLoading(false);
- }
- }
-
- async function handleInstall() {
- setInstalling(true);
- try {
- const result = await api.installWebmail();
- if (result.success) {
- showToast('Roundcube installed', 'success');
- loadStatus();
- } else {
- showToast(result.error, 'error');
- }
- } catch (err) {
- showToast(err.message, 'error');
- } finally {
- setInstalling(false);
- }
- }
-
- if (loading) return
Loading...
;
+
+ {showAliasForm && (
+
+
+
+ Create
+
+
+ )}
+
+ {aliases.length === 0 ? (
+
No aliases for this domain
+ ) : aliases.map(a => (
+
+
+
{a.source} → {a.destination}
+
+
+ handleDeleteAlias(a.id)}>Delete
+
+
+ ))}
+
+ >
+ )}
+
+ )}
- return (
-
-
-
- {status?.installed ? (
-
-
-
-
Roundcube Webmail
-
Browser-based email client for your users
+ {/* Forwarding Tab */}
+ {activeTab === 'forwarding' && (
+
+
+
+ Select Account
+ setSelectedAccountId(e.target.value)}>
+ -- Select --
+ {allAccounts.map(a => {a.email} )}
+
-
Installed
- {status.url && (
-
- Access webmail at: {status.url}
-
+ {selectedAccountId && (
+ <>
+
+
Forwarding Rules
+ setShowForwardForm(!showForwardForm)}>
+ {showForwardForm ? 'Cancel' : 'Add Rule'}
+
+
+ {showForwardForm && (
+
+
+
+ Add
+
+
+ )}
+
+ {forwardingRules.length === 0 ? (
+
+ ) : forwardingRules.map(r => (
+
+
+
{r.account_email} → {r.destination}
+
{r.keep_copy ? 'Keeps copy' : 'No copy'} · {r.is_active ? 'Active' : 'Inactive'}
+
+
+ handleDeleteForwarding(r.id)}>Delete
+
+
+ ))}
+
+ >
)}
- ) : (
-
-
Roundcube Webmail
-
Roundcube provides a browser-based interface for users to read and send email.
-
- {installing ? 'Installing...' : 'Install Roundcube'}
-
-
)}
-
-
-
- );
-};
-
-// ==========================================
-// LOGS TAB
-// ==========================================
-
-const LogsTab = () => {
- const [logs, setLogs] = useState([]);
- const [loading, setLoading] = useState(true);
- const [source, setSource] = useState(null);
-
- useEffect(() => {
- loadLogs();
- }, []);
-
- async function loadLogs() {
- setLoading(true);
- try {
- const data = await api.getMailLogs(200);
- setLogs(data.lines || []);
- setSource(data.source);
- } catch (err) {
- console.error('Failed to load mail logs:', err);
- } finally {
- setLoading(false);
- }
- }
+ {/* DNS Providers Tab */}
+ {activeTab === 'dns-providers' && (
+
+
+
DNS Providers
+ setShowProviderForm(!showProviderForm)}>
+ {showProviderForm ? 'Cancel' : 'Add Provider'}
+
+
+ {showProviderForm && (
+
+
+ Add
+
+ )}
+
+ {providers.length === 0 ? (
+
No DNS providers configured
+ ) : providers.map(p => (
+
+
+
{p.name}
+ {p.provider}
+
+
+
API Key: {p.api_key} {p.is_default && Default }
+
+
+ handleTestProvider(p.id)} disabled={actionLoading}>Test
+ handleListZones(p.id)}>Zones
+ handleDeleteProvider(p.id)}>Delete
+
+ {providerZones[p.id] && (
+
+ {providerZones[p.id].map(z => (
+
{z.name} {z.id}
+ ))}
+
+ )}
+
+ ))}
+
+
+ )}
- if (loading) return
Loading logs...
;
+ {/* Spam Tab */}
+ {activeTab === 'spam' && spamConfig && (
+
+
+
SpamAssassin Configuration
+
+ Update Rules
+ Save
+
+
+
+
+ )}
- return (
-
-
-
Mail Logs
-
- {source && Source: {source} }
- Refresh
-
-
+ {/* Webmail Tab */}
+ {activeTab === 'webmail' && (
+
+
Roundcube Webmail
+
+
+
+ {webmailStatus?.running ? 'Running' : webmailStatus?.installed ? 'Stopped' : 'Not Installed'}
+
+ {webmailStatus?.port && Port: {webmailStatus.port} }
+
+
+ {!webmailStatus?.installed ? (
+ Install Roundcube
+ ) : (
+ <>
+ {webmailStatus?.running
+ ? handleWebmailControl('stop')} disabled={actionLoading}>Stop
+ : handleWebmailControl('start')} disabled={actionLoading}>Start
+ }
+ handleWebmailControl('restart')} disabled={actionLoading}>Restart
+ >
+ )}
+
+ {webmailStatus?.installed && (
+
+
+ Proxy Domain
+ setProxyDomain(e.target.value)} placeholder="webmail.example.com" />
+
+
Configure Nginx Proxy
+
+ )}
+
+
+ )}
-
-
- {logs.length === 0 ? (
-
No mail logs available.
- ) : (
-
{logs.join('\n')}
+ {/* Queue & Logs Tab */}
+ {activeTab === 'queue' && (
+
+
+
+
Mail Queue ({queue.length})
+ Flush Queue
+
+
+ {queue.length === 0 ? (
+
+ ) : queue.map(item => (
+
+
+
{item.queue_id}
+
+ From: {item.sender}
+ Size: {item.size}B
+ {item.arrival_time}
+
+ {item.error &&
{item.error}
}
+
+
handleDeleteQueueItem(item.queue_id)}>Delete
+
+ ))}
+
+
+
+
+
Mail Logs
+
+ setLogLines(parseInt(e.target.value))}>
+ 50 lines
+ 100 lines
+ 500 lines
+
+
+
+
{logs.length > 0 ? logs.join('\n') : 'No logs available'}
+
+
)}
-
-
+ >
+ )}
+
+ {confirmDialog &&
}
);
-};
+}
export default Email;
diff --git a/frontend/src/pages/Register.jsx b/frontend/src/pages/Register.jsx
index 65ce77d..9241425 100644
--- a/frontend/src/pages/Register.jsx
+++ b/frontend/src/pages/Register.jsx
@@ -1,6 +1,7 @@
-import React, { useState } from 'react';
-import { useNavigate, Link } from 'react-router-dom';
+import React, { useState, useEffect } from 'react';
+import { useNavigate, Link, useSearchParams } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
+import api from '../services/api';
import ServerKitLogo from '../components/ServerKitLogo';
const Register = () => {
@@ -10,8 +11,83 @@ const Register = () => {
const [confirmPassword, setConfirmPassword] = useState('');
const [error, setError] = useState('');
const [loading, setLoading] = useState(false);
- const { register } = useAuth();
+ const [inviteInfo, setInviteInfo] = useState(null);
+ const [inviteLoading, setInviteLoading] = useState(false);
+ const [inviteInvalid, setInviteInvalid] = useState(false);
+ const { register, registrationEnabled } = useAuth();
const navigate = useNavigate();
+ const [searchParams] = useSearchParams();
+ const inviteToken = searchParams.get('invite');
+
+ useEffect(() => {
+ if (inviteToken) {
+ setInviteLoading(true);
+ api.validateInvitation(inviteToken)
+ .then(data => {
+ setInviteInfo(data);
+ if (data.email) setEmail(data.email);
+ })
+ .catch(() => {
+ setInviteInvalid(true);
+ })
+ .finally(() => setInviteLoading(false));
+ }
+ }, [inviteToken]);
+
+ // If no invite token and registration disabled, show message
+ if (!inviteToken && !registrationEnabled) {
+ return (
+
+
+
+
+
+
+
ServerKit
+
Registration is currently disabled
+
+
+ Already have an account? Sign in
+
+
+
+ );
+ }
+
+ if (inviteLoading) {
+ return (
+
+
+
+
+
+
+
ServerKit
+
Validating invitation...
+
+
+
+ );
+ }
+
+ if (inviteInvalid) {
+ return (
+
+
+
+
+
+
+
ServerKit
+
This invitation is invalid or has expired
+
+
+ Already have an account? Sign in
+
+
+
+ );
+ }
async function handleSubmit(e) {
e.preventDefault();
@@ -30,7 +106,7 @@ const Register = () => {
setLoading(true);
try {
- await register(email, username, password);
+ await register(email, username, password, inviteToken || undefined);
navigate('/');
} catch (err) {
setError(err.message || 'Failed to register');
@@ -47,7 +123,7 @@ const Register = () => {
ServerKit
-
Create your account
+
{inviteInfo ? `You've been invited as ${inviteInfo.role}` : 'Create your account'}
{error &&
{error}
}
@@ -62,6 +138,7 @@ const Register = () => {
onChange={(e) => setEmail(e.target.value)}
placeholder="you@example.com"
required
+ readOnly={!!inviteInfo?.email}
/>
diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx
index dd7beac..12e4204 100644
--- a/frontend/src/pages/Settings.jsx
+++ b/frontend/src/pages/Settings.jsx
@@ -6,6 +6,7 @@ import useDashboardLayout from '../hooks/useDashboardLayout';
import api from '../services/api';
import UsersTab from '../components/settings/UsersTab';
import AuditLogTab from '../components/settings/AuditLogTab';
+import ActivityTab from '../components/settings/ActivityTab';
import SSOConfigTab from '../components/settings/SSOConfigTab';
import MigrationHistoryTab from '../components/settings/MigrationHistoryTab';
import ApiSettingsTab from '../components/settings/ApiSettingsTab';
@@ -24,7 +25,7 @@ import {
} from 'lucide-react';
import ServerKitLogo from '../components/ServerKitLogo';
-const VALID_TABS = ['profile', 'security', 'appearance', 'notifications', 'system', 'users', 'audit', 'site', 'sso', 'api', 'migrations', 'developer', 'about'];
+const VALID_TABS = ['profile', 'security', 'appearance', 'notifications', 'system', 'users', 'audit', 'activity', 'site', 'sso', 'api', 'migrations', 'developer', 'about'];
const Settings = () => {
const [activeTab, setActiveTab] = useTabParam('/settings', VALID_TABS);
@@ -136,6 +137,13 @@ const Settings = () => {
Audit Log
+
setActiveTab('activity')}
+ >
+
+ Activity
+
setActiveTab('site')}
@@ -206,6 +214,7 @@ const Settings = () => {
{activeTab === 'system' && }
{activeTab === 'users' && isAdmin && }
{activeTab === 'audit' && isAdmin && }
+ {activeTab === 'activity' && isAdmin && }
{activeTab === 'site' && isAdmin && }
{activeTab === 'sso' && isAdmin && }
{activeTab === 'api' && isAdmin && }
diff --git a/frontend/src/services/api.js b/frontend/src/services/api.js
index 2e0a39b..3b6ccf1 100644
--- a/frontend/src/services/api.js
+++ b/frontend/src/services/api.js
@@ -106,10 +106,12 @@ class ApiService {
return data;
}
- async register(email, username, password) {
+ async register(email, username, password, inviteToken) {
+ const body = { email, username, password };
+ if (inviteToken) body.invite_token = inviteToken;
const data = await this.request('/auth/register', {
method: 'POST',
- body: { email, username, password },
+ body,
});
this.setTokens(data.access_token, data.refresh_token);
return data;
@@ -2913,130 +2915,53 @@ class ApiService {
return `${baseUrl}/api/servers/agent/download/${os}/${arch}`;
}
- // ==========================================
- // Email Server
- // ==========================================
-
- async getEmailStatus() {
- return this.request('/email/status');
- }
-
- async getEmailConfig() {
- return this.request('/email/config');
- }
-
- async updateEmailConfig(data) {
- return this.request('/email/config', { method: 'PUT', body: data });
- }
-
- // Postfix
- async installPostfix() {
- return this.request('/email/postfix/install', { method: 'POST' });
- }
-
- async getPostfixConfig() {
- return this.request('/email/postfix/config');
- }
-
- async updatePostfixConfig(data) {
- return this.request('/email/postfix/config', { method: 'PUT', body: data });
- }
-
- // Mail Queue
- async getMailQueue() {
- return this.request('/email/queue');
- }
-
- async flushMailQueue() {
- return this.request('/email/queue/flush', { method: 'POST' });
- }
-
- async deleteQueuedMessage(queueId) {
- return this.request(`/email/queue/${queueId}`, { method: 'DELETE' });
- }
-
- // Dovecot
- async installDovecot() {
- return this.request('/email/dovecot/install', { method: 'POST' });
- }
-
- async getDovecotConfig() {
- return this.request('/email/dovecot/config');
- }
-
+ // ── Email Server ──
+ async getEmailStatus() { return this.request('/email/status'); }
+ async installEmailServer(data = {}) { return this.request('/email/install', { method: 'POST', body: JSON.stringify(data) }); }
+ async controlEmailService(component, action) { return this.request(`/email/service/${component}/${action}`, { method: 'POST' }); }
+ // Email Domains
+ async getEmailDomains() { return this.request('/email/domains'); }
+ async addEmailDomain(data) { return this.request('/email/domains', { method: 'POST', body: JSON.stringify(data) }); }
+ async getEmailDomain(domainId) { return this.request(`/email/domains/${domainId}`); }
+ async deleteEmailDomain(domainId) { return this.request(`/email/domains/${domainId}`, { method: 'DELETE' }); }
+ async verifyEmailDNS(domainId) { return this.request(`/email/domains/${domainId}/verify-dns`, { method: 'POST' }); }
+ async deployEmailDNS(domainId) { return this.request(`/email/domains/${domainId}/deploy-dns`, { method: 'POST' }); }
// Email Accounts
- async getEmailAccounts() {
- return this.request('/email/accounts');
- }
-
- async createEmailAccount(data) {
- return this.request('/email/accounts', { method: 'POST', body: data });
- }
-
- async updateEmailAccount(accountId, data) {
- return this.request(`/email/accounts/${accountId}`, { method: 'PUT', body: data });
- }
-
- async deleteEmailAccount(accountId) {
- return this.request(`/email/accounts/${accountId}`, { method: 'DELETE' });
- }
-
- async setEmailForwarding(accountId, data) {
- return this.request(`/email/accounts/${accountId}/forwarding`, { method: 'PUT', body: data });
- }
-
+ async getEmailAccounts(domainId) { return this.request(`/email/domains/${domainId}/accounts`); }
+ async createEmailAccount(domainId, data) { return this.request(`/email/domains/${domainId}/accounts`, { method: 'POST', body: JSON.stringify(data) }); }
+ async getEmailAccount(accountId) { return this.request(`/email/accounts/${accountId}`); }
+ async updateEmailAccount(accountId, data) { return this.request(`/email/accounts/${accountId}`, { method: 'PUT', body: JSON.stringify(data) }); }
+ async deleteEmailAccount(accountId) { return this.request(`/email/accounts/${accountId}`, { method: 'DELETE' }); }
+ async changeEmailPassword(accountId, password) { return this.request(`/email/accounts/${accountId}/password`, { method: 'POST', body: JSON.stringify({ password }) }); }
+ // Email Aliases
+ async getEmailAliases(domainId) { return this.request(`/email/domains/${domainId}/aliases`); }
+ async createEmailAlias(domainId, data) { return this.request(`/email/domains/${domainId}/aliases`, { method: 'POST', body: JSON.stringify(data) }); }
+ async deleteEmailAlias(aliasId) { return this.request(`/email/aliases/${aliasId}`, { method: 'DELETE' }); }
+ // Email Forwarding
+ async getEmailForwarding(accountId) { return this.request(`/email/accounts/${accountId}/forwarding`); }
+ async createEmailForwarding(accountId, data) { return this.request(`/email/accounts/${accountId}/forwarding`, { method: 'POST', body: JSON.stringify(data) }); }
+ async updateEmailForwarding(ruleId, data) { return this.request(`/email/forwarding/${ruleId}`, { method: 'PUT', body: JSON.stringify(data) }); }
+ async deleteEmailForwarding(ruleId) { return this.request(`/email/forwarding/${ruleId}`, { method: 'DELETE' }); }
+ // DNS Providers
+ async getEmailDNSProviders() { return this.request('/email/dns-providers'); }
+ async addEmailDNSProvider(data) { return this.request('/email/dns-providers', { method: 'POST', body: JSON.stringify(data) }); }
+ async deleteEmailDNSProvider(providerId) { return this.request(`/email/dns-providers/${providerId}`, { method: 'DELETE' }); }
+ async testEmailDNSProvider(providerId) { return this.request(`/email/dns-providers/${providerId}/test`, { method: 'POST' }); }
+ async getEmailDNSZones(providerId) { return this.request(`/email/dns-providers/${providerId}/zones`); }
// SpamAssassin
- async installSpamAssassin() {
- return this.request('/email/spamassassin/install', { method: 'POST' });
- }
-
- async getSpamAssassinConfig() {
- return this.request('/email/spamassassin/config');
- }
-
- async updateSpamAssassinConfig(data) {
- return this.request('/email/spamassassin/config', { method: 'PUT', body: data });
- }
-
- // DKIM
- async installDkim() {
- return this.request('/email/dkim/install', { method: 'POST' });
- }
-
- async generateDkimKey(data) {
- return this.request('/email/dkim/generate', { method: 'POST', body: data });
- }
-
- async getEmailDnsRecords(domain) {
- return this.request(`/email/dns/${domain}`);
- }
-
- // Service Control
- async startEmailService(service) {
- return this.request(`/email/services/${service}/start`, { method: 'POST' });
- }
-
- async stopEmailService(service) {
- return this.request(`/email/services/${service}/stop`, { method: 'POST' });
- }
-
- async restartEmailService(service) {
- return this.request(`/email/services/${service}/restart`, { method: 'POST' });
- }
-
- // Webmail
- async getWebmailStatus() {
- return this.request('/email/webmail/status');
- }
-
- async installWebmail() {
- return this.request('/email/webmail/install', { method: 'POST' });
- }
-
- // Mail Logs
- async getMailLogs(lines = 100) {
- return this.request(`/email/logs?lines=${lines}`);
- }
+ async getSpamConfig() { return this.request('/email/spam/config'); }
+ async updateSpamConfig(data) { return this.request('/email/spam/config', { method: 'PUT', body: JSON.stringify(data) }); }
+ async updateSpamRules() { return this.request('/email/spam/update-rules', { method: 'POST' }); }
+ // Roundcube Webmail
+ async getWebmailStatus() { return this.request('/email/webmail/status'); }
+ async installWebmail(data = {}) { return this.request('/email/webmail/install', { method: 'POST', body: JSON.stringify(data) }); }
+ async controlWebmail(action) { return this.request(`/email/webmail/service/${action}`, { method: 'POST' }); }
+ async configureWebmailProxy(domain) { return this.request('/email/webmail/configure-proxy', { method: 'POST', body: JSON.stringify({ domain }) }); }
+ // Mail Queue & Logs
+ async getMailQueue() { return this.request('/email/queue'); }
+ async flushMailQueue() { return this.request('/email/queue/flush', { method: 'POST' }); }
+ async deleteMailQueueItem(queueId) { return this.request(`/email/queue/${queueId}`, { method: 'DELETE' }); }
+ async getMailLogs(lines = 100) { return this.request(`/email/logs?lines=${lines}`); }
// ==================== SSO / OAuth ====================
@@ -3196,6 +3121,80 @@ class ApiService {
async getApiKeyUsage(keyId, period = '24h') {
return this.request(`/api-analytics/keys/${keyId}/usage?period=${period}`);
}
+
+ // ========================================
+ // Admin - Permissions endpoints
+ // ========================================
+ async getUserPermissions(userId) {
+ return this.request(`/admin/users/${userId}/permissions`);
+ }
+
+ async updateUserPermissions(userId, permissions) {
+ return this.request(`/admin/users/${userId}/permissions`, {
+ method: 'PUT',
+ body: { permissions }
+ });
+ }
+
+ async resetUserPermissions(userId) {
+ return this.request(`/admin/users/${userId}/permissions/reset`, {
+ method: 'POST'
+ });
+ }
+
+ async getPermissionTemplates() {
+ return this.request('/admin/permissions/templates');
+ }
+
+ // ========================================
+ // Admin - Invitations endpoints
+ // ========================================
+ async getInvitations(status) {
+ const query = status ? `?status=${status}` : '';
+ return this.request(`/admin/invitations/${query}`);
+ }
+
+ async createInvitation(data) {
+ return this.request('/admin/invitations/', {
+ method: 'POST',
+ body: data
+ });
+ }
+
+ async revokeInvitation(id) {
+ return this.request(`/admin/invitations/${id}`, {
+ method: 'DELETE'
+ });
+ }
+
+ async resendInvitation(id) {
+ return this.request(`/admin/invitations/resend/${id}`, {
+ method: 'POST'
+ });
+ }
+
+ async validateInvitation(token) {
+ return this.request(`/admin/invitations/validate/${token}`);
+ }
+
+ // ========================================
+ // Admin - Activity endpoints
+ // ========================================
+ async getActivitySummary() {
+ return this.request('/admin/activity/summary');
+ }
+
+ async getActivityFeed(params = {}) {
+ const searchParams = new URLSearchParams();
+ if (params.page) searchParams.append('page', params.page);
+ if (params.per_page) searchParams.append('per_page', params.per_page);
+ if (params.user_id) searchParams.append('user_id', params.user_id);
+ if (params.action) searchParams.append('action', params.action);
+ if (params.start_date) searchParams.append('start_date', params.start_date);
+ if (params.end_date) searchParams.append('end_date', params.end_date);
+ const query = searchParams.toString();
+ return this.request(`/admin/activity/feed${query ? '?' + query : ''}`);
+ }
}
export const api = new ApiService();
diff --git a/frontend/src/styles/components/_users.less b/frontend/src/styles/components/_users.less
index eda0f84..60e3323 100644
--- a/frontend/src/styles/components/_users.less
+++ b/frontend/src/styles/components/_users.less
@@ -484,3 +484,290 @@
background: fade(@danger, 10%);
}
}
+
+// --------------------------------------------
+// Permission Editor
+// --------------------------------------------
+.permission-editor {
+ margin-top: @space-4;
+ border: 1px solid @border-default;
+ border-radius: @radius-md;
+ max-height: 400px;
+ overflow-y: auto;
+}
+
+.permission-header-row {
+ display: grid;
+ grid-template-columns: 1fr 60px 60px;
+ gap: @space-2;
+ padding: @space-2 @space-4;
+ background: @bg-hover;
+ font-size: @font-size-xs;
+ font-weight: 600;
+ color: @text-secondary;
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+ border-bottom: 1px solid @border-default;
+ position: sticky;
+ top: 0;
+ z-index: 1;
+}
+
+.permission-level-label {
+ text-align: center;
+}
+
+.permission-group {
+ &:not(:last-child) {
+ border-bottom: 1px solid @border-default;
+ }
+}
+
+.permission-group-label {
+ padding: @space-2 @space-4;
+ font-size: @font-size-xs;
+ font-weight: 600;
+ color: @text-tertiary;
+ text-transform: uppercase;
+ letter-spacing: 0.1em;
+ background: fade(@bg-hover-raw, 50%);
+}
+
+.permission-row {
+ display: grid;
+ grid-template-columns: 1fr 60px 60px;
+ gap: @space-2;
+ align-items: center;
+ padding: @space-2 @space-4;
+
+ &:hover {
+ background: @bg-hover;
+ }
+}
+
+.permission-feature-name {
+ font-size: @font-size-sm;
+}
+
+.permission-checkbox {
+ display: flex;
+ justify-content: center;
+
+ input[type="checkbox"] {
+ width: 16px;
+ height: 16px;
+ cursor: pointer;
+ }
+}
+
+.customize-permissions-section {
+ margin-top: @space-4;
+
+ > .btn {
+ margin-bottom: @space-2;
+ }
+}
+
+// --------------------------------------------
+// Invitations Section
+// --------------------------------------------
+.invitations-section {
+ margin-top: @space-8;
+ padding-top: @space-8;
+ border-top: 1px solid @border-default;
+
+ .tab-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ margin-bottom: @space-4;
+
+ .tab-header-content {
+ h4 {
+ margin: 0 0 @space-1 0;
+ font-size: @font-size-md;
+ font-weight: 600;
+ }
+
+ p {
+ margin: 0;
+ color: @text-secondary;
+ font-size: @font-size-sm;
+ }
+ }
+ }
+}
+
+.invite-link-display {
+ display: flex;
+ align-items: center;
+ gap: @space-2;
+ padding: @space-3;
+ background: @bg-hover;
+ border: 1px solid @border-default;
+ border-radius: @radius-md;
+ margin-top: @space-2;
+
+ code {
+ flex: 1;
+ font-family: @font-mono;
+ font-size: @font-size-sm;
+ word-break: break-all;
+ color: @text-primary;
+ }
+}
+
+.invitation-status {
+ display: inline-flex;
+ align-items: center;
+ padding: 3px 8px;
+ border-radius: @radius-full;
+ font-size: @font-size-xs;
+ font-weight: 500;
+ text-transform: capitalize;
+
+ &.pending {
+ background: fade(@accent-primary-raw, 15%);
+ color: @accent-primary;
+ }
+
+ &.accepted {
+ background: fade(@success, 15%);
+ color: @success;
+ }
+
+ &.expired {
+ background: fade(@text-tertiary-raw, 15%);
+ color: @text-tertiary;
+ }
+
+ &.revoked {
+ background: fade(@danger, 15%);
+ color: @danger;
+ }
+}
+
+// --------------------------------------------
+// Activity Tab
+// --------------------------------------------
+.activity-tab {
+ .tab-header {
+ margin-bottom: @space-6;
+
+ h3 {
+ margin: 0 0 @space-1 0;
+ font-size: @font-size-lg;
+ font-weight: 600;
+ }
+
+ p {
+ margin: 0;
+ color: @text-secondary;
+ font-size: @font-size-sm;
+ }
+ }
+}
+
+.activity-summary {
+ display: grid;
+ grid-template-columns: repeat(3, 1fr);
+ gap: @space-4;
+ margin-bottom: @space-6;
+
+ @media (max-width: 600px) {
+ grid-template-columns: 1fr;
+ }
+}
+
+.activity-stat-card {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ padding: @space-6;
+ background: @bg-card;
+ border: 1px solid @border-default;
+ border-radius: @radius-md;
+
+ .stat-number {
+ font-size: 2rem;
+ font-weight: 700;
+ color: @accent-primary;
+ line-height: 1;
+ }
+
+ .stat-label {
+ margin-top: @space-2;
+ font-size: @font-size-sm;
+ color: @text-secondary;
+ }
+}
+
+.most-active-users {
+ margin-bottom: @space-6;
+ padding: @space-4;
+ background: @bg-card;
+ border: 1px solid @border-default;
+ border-radius: @radius-md;
+
+ h4 {
+ margin: 0 0 @space-4 0;
+ font-size: @font-size-md;
+ font-weight: 600;
+ }
+}
+
+.active-user-item {
+ display: flex;
+ align-items: center;
+ gap: @space-3;
+ padding: @space-2 0;
+
+ &:not(:last-child) {
+ border-bottom: 1px solid @border-default;
+ }
+}
+
+.active-user-rank {
+ width: 24px;
+ text-align: center;
+ font-weight: 600;
+ font-size: @font-size-sm;
+ color: @text-tertiary;
+}
+
+.active-user-name {
+ min-width: 100px;
+ font-size: @font-size-sm;
+ font-weight: 500;
+}
+
+.active-user-bar-wrapper {
+ flex: 1;
+ height: 8px;
+ background: @bg-hover;
+ border-radius: @radius-full;
+ overflow: hidden;
+}
+
+.active-user-bar {
+ height: 100%;
+ background: @accent-primary;
+ border-radius: @radius-full;
+ transition: width 0.3s ease;
+}
+
+.active-user-count {
+ min-width: 40px;
+ text-align: right;
+ font-size: @font-size-sm;
+ font-weight: 600;
+ color: @text-secondary;
+}
+
+.activity-feed-section {
+ h4 {
+ margin: 0 0 @space-4 0;
+ font-size: @font-size-md;
+ font-weight: 600;
+ }
+}
diff --git a/frontend/src/styles/pages/_email.less b/frontend/src/styles/pages/_email.less
index 632f8ac..7fb1e11 100644
--- a/frontend/src/styles/pages/_email.less
+++ b/frontend/src/styles/pages/_email.less
@@ -3,7 +3,41 @@
// ============================================
.email-page {
- .tabs-nav {
+ .page-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ margin-bottom: @space-6;
+ flex-wrap: wrap;
+ gap: @space-4;
+
+ .page-header-content {
+ h1 {
+ font-size: 24px;
+ font-weight: @font-weight-semibold;
+ margin: 0 0 4px 0;
+ }
+
+ .page-description {
+ color: @text-secondary;
+ margin: 0;
+ }
+ }
+
+ .page-header-actions {
+ display: flex;
+ gap: @space-2;
+ }
+ }
+
+ .page-loading {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ min-height: 400px;
+ }
+
+ .tab-navigation {
display: flex;
gap: @space-2;
margin-bottom: @space-6;
@@ -11,7 +45,7 @@
padding-bottom: @space-2;
overflow-x: auto;
- .tab-btn {
+ .tab-button {
padding: @space-2 @space-4;
border: none;
background: none;
@@ -19,6 +53,7 @@
cursor: pointer;
border-radius: @radius-md;
white-space: nowrap;
+ font-size: @font-size-sm;
transition: all 0.2s;
&:hover {
@@ -32,21 +67,79 @@
}
}
}
+
+ // Not-installed state
+ .not-installed {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ justify-content: center;
+ padding: 80px @space-6;
+ background: @bg-card;
+ border-radius: @radius-lg;
+ text-align: center;
+
+ .icon {
+ font-size: 64px;
+ color: @text-tertiary;
+ margin-bottom: @space-4;
+ }
+
+ h2 {
+ margin: 0 0 @space-2 0;
+ }
+
+ p {
+ color: @text-secondary;
+ margin: 0 0 @space-6 0;
+ max-width: 480px;
+ }
+
+ .install-form {
+ display: flex;
+ flex-direction: column;
+ gap: @space-4;
+ align-items: center;
+ width: 100%;
+ max-width: 400px;
+ }
+ }
+
+ .section-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: @space-4;
+
+ h2 {
+ margin: 0;
+ font-size: @font-size-lg;
+ font-weight: @font-weight-semibold;
+ }
+
+ .section-actions {
+ display: flex;
+ gap: @space-2;
+ }
+ }
}
-// Service overview cards
-.email-overview {
- .services-grid {
+// ── Status Tab ──
+
+.email-status {
+ .status-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
gap: @space-4;
+ margin-bottom: @space-6;
}
}
.email-service-card {
- .card-body {
- padding: @space-6;
- }
+ background: @bg-card;
+ border-radius: @radius-lg;
+ border: 1px solid @border-subtle;
+ padding: @space-5;
}
.email-service-header {
@@ -61,15 +154,22 @@
font-weight: @font-weight-semibold;
}
- p {
- margin: 0;
- font-size: @font-size-sm;
+ .version {
+ font-size: @font-size-xs;
+ color: @text-tertiary;
}
}
.email-service-meta {
margin-top: @space-3;
font-size: @font-size-sm;
+ color: @text-secondary;
+
+ .meta-item {
+ display: flex;
+ justify-content: space-between;
+ padding: @space-1 0;
+ }
}
.email-service-actions {
@@ -80,160 +180,394 @@
border-top: 1px solid @border-subtle;
}
-// Accounts section
-.email-accounts {
- .section-header {
+// ── Domains Tab ──
+
+.email-domains {
+ .domain-list {
+ display: flex;
+ flex-direction: column;
+ gap: @space-4;
+ }
+}
+
+.domain-card {
+ background: @bg-card;
+ border-radius: @radius-lg;
+ border: 1px solid @border-subtle;
+ padding: @space-5;
+
+ .domain-header {
display: flex;
justify-content: space-between;
- align-items: center;
- margin-bottom: @space-4;
+ align-items: flex-start;
+ margin-bottom: @space-3;
- h2 {
+ h3 {
margin: 0;
+ font-size: @font-size-md;
}
}
-}
-.email-create-form {
- margin-bottom: @space-4;
+ .domain-stats {
+ display: flex;
+ gap: @space-6;
+ margin-bottom: @space-3;
+ font-size: @font-size-sm;
+ color: @text-secondary;
- .form-grid {
- display: grid;
- grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
- gap: @space-4;
+ span {
+ display: flex;
+ align-items: center;
+ gap: @space-1;
+ }
}
-}
-// Postfix / Dovecot config
-.email-postfix,
-.email-dovecot,
-.email-spam {
- .form-grid {
- display: grid;
- grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
- gap: @space-4;
+ .domain-dns {
+ display: flex;
+ gap: @space-2;
+ flex-wrap: wrap;
+ margin-bottom: @space-3;
+
+ .dns-badge {
+ font-size: @font-size-xs;
+ padding: 2px @space-2;
+ border-radius: @radius-sm;
+ background: @bg-elevated;
+ border: 1px solid @border-subtle;
+ color: @text-secondary;
+
+ &.verified {
+ background: @bg-elevated;
+ border-color: @success;
+ color: @success;
+ }
+
+ &.missing {
+ background: @bg-elevated;
+ border-color: @danger;
+ color: @danger;
+ }
+ }
+ }
+
+ .domain-actions {
+ display: flex;
+ gap: @space-2;
+ padding-top: @space-3;
+ border-top: 1px solid @border-subtle;
}
}
-// Config list (read-only)
-.config-list {
- display: flex;
- flex-direction: column;
- gap: @space-2;
+// ── Accounts Tab ──
+
+.email-accounts {
+ .domain-selector {
+ margin-bottom: @space-4;
+ }
+
+ .accounts-list {
+ display: flex;
+ flex-direction: column;
+ gap: @space-3;
+ }
}
-.config-item {
+.account-card {
+ background: @bg-card;
+ border-radius: @radius-md;
+ border: 1px solid @border-subtle;
+ padding: @space-4;
display: flex;
+ justify-content: space-between;
+ align-items: center;
gap: @space-4;
- padding: @space-2 0;
- border-bottom: 1px solid @border-subtle;
- font-size: @font-size-sm;
+ flex-wrap: wrap;
- &:last-child {
- border-bottom: none;
+ .account-info {
+ .account-email {
+ font-weight: @font-weight-medium;
+ margin-bottom: @space-1;
+ }
+
+ .account-meta {
+ font-size: @font-size-sm;
+ color: @text-secondary;
+ display: flex;
+ gap: @space-4;
+ }
}
- .config-key {
- color: @text-secondary;
- min-width: 200px;
- font-family: @font-mono;
- font-size: @font-size-xs;
+ .account-actions {
+ display: flex;
+ gap: @space-2;
}
+}
- .config-value {
- color: @text-primary;
- word-break: break-all;
+// ── Form styles ──
+
+.email-form {
+ background: @bg-card;
+ border-radius: @radius-lg;
+ border: 1px solid @border-subtle;
+ padding: @space-5;
+ margin-bottom: @space-4;
+
+ .form-title {
+ font-size: @font-size-md;
+ font-weight: @font-weight-semibold;
+ margin: 0 0 @space-4 0;
+ }
+
+ .form-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
+ gap: @space-4;
+ margin-bottom: @space-4;
+ }
+
+ .form-actions {
+ display: flex;
+ gap: @space-2;
+ justify-content: flex-end;
}
}
-// DNS records
-.dns-records-list {
- display: flex;
- flex-direction: column;
- gap: @space-4;
+// ── Aliases & Forwarding ──
+
+.email-aliases,
+.email-forwarding {
+ .items-list {
+ display: flex;
+ flex-direction: column;
+ gap: @space-3;
+ }
}
-.dns-record-item {
- padding: @space-4;
- background: @bg-elevated;
+.alias-card,
+.forwarding-card {
+ background: @bg-card;
border-radius: @radius-md;
border: 1px solid @border-subtle;
-}
-
-.dns-record-header {
+ padding: @space-4;
display: flex;
+ justify-content: space-between;
align-items: center;
- gap: @space-3;
- margin-bottom: @space-3;
-}
+ gap: @space-4;
+ flex-wrap: wrap;
+
+ .item-info {
+ .item-mapping {
+ font-weight: @font-weight-medium;
+ margin-bottom: @space-1;
+ display: flex;
+ align-items: center;
+ gap: @space-2;
+
+ .arrow {
+ color: @text-tertiary;
+ }
+ }
-.dns-record-purpose {
- font-size: @font-size-sm;
- color: @text-secondary;
+ .item-meta {
+ font-size: @font-size-sm;
+ color: @text-secondary;
+ }
+ }
+
+ .item-actions {
+ display: flex;
+ gap: @space-2;
+ }
}
-.dns-record-details {
- display: flex;
- flex-direction: column;
- gap: @space-2;
+// ── DNS Providers Tab ──
+
+.email-dns-providers {
+ .provider-list {
+ display: flex;
+ flex-direction: column;
+ gap: @space-4;
+ }
}
-.dns-field {
- display: flex;
- align-items: flex-start;
- gap: @space-2;
+.provider-card {
+ background: @bg-card;
+ border-radius: @radius-lg;
+ border: 1px solid @border-subtle;
+ padding: @space-5;
+
+ .provider-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ margin-bottom: @space-3;
+
+ h3 {
+ margin: 0;
+ font-size: @font-size-md;
+ }
- .dns-label {
+ .provider-type {
+ font-size: @font-size-xs;
+ padding: 2px @space-2;
+ border-radius: @radius-sm;
+ background: @bg-elevated;
+ text-transform: uppercase;
+ letter-spacing: 0.5px;
+ }
+ }
+
+ .provider-meta {
font-size: @font-size-sm;
- color: @text-tertiary;
- min-width: 50px;
+ color: @text-secondary;
+ margin-bottom: @space-3;
+
+ .meta-row {
+ display: flex;
+ gap: @space-4;
+ padding: @space-1 0;
+ }
}
- code {
- font-family: @font-mono;
- font-size: @font-size-xs;
- color: @text-primary;
- background: @bg-secondary;
- padding: @space-1 @space-2;
- border-radius: @radius-sm;
- word-break: break-all;
+ .provider-actions {
+ display: flex;
+ gap: @space-2;
+ padding-top: @space-3;
+ border-top: 1px solid @border-subtle;
+ }
+
+ .zones-list {
+ margin-top: @space-3;
+ padding: @space-3;
+ background: @bg-elevated;
+ border-radius: @radius-md;
+ font-size: @font-size-sm;
+
+ .zone-item {
+ display: flex;
+ justify-content: space-between;
+ padding: @space-1 0;
+
+ &:not(:last-child) {
+ border-bottom: 1px solid @border-subtle;
+ }
+ }
}
}
-// Mail queue
-.email-queue {
- .section-header {
+// ── SpamAssassin Config ──
+
+.email-spam {
+ .spam-config {
+ background: @bg-card;
+ border-radius: @radius-lg;
+ border: 1px solid @border-subtle;
+ padding: @space-5;
+ }
+
+ .form-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fit, minmax(280px, 1fr));
+ gap: @space-4;
+ margin-bottom: @space-4;
+ }
+
+ .checkbox-field {
display: flex;
- justify-content: space-between;
align-items: center;
- margin-bottom: @space-4;
+ gap: @space-2;
- h2 {
- margin: 0;
+ label {
+ font-size: @font-size-sm;
}
}
}
-// Webmail
+// ── Webmail ──
+
.email-webmail {
- .webmail-status {
- padding: @space-2;
+ .webmail-card {
+ background: @bg-card;
+ border-radius: @radius-lg;
+ border: 1px solid @border-subtle;
+ padding: @space-5;
+
+ .webmail-status-row {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ margin-bottom: @space-4;
+ }
+
+ .webmail-info {
+ font-size: @font-size-sm;
+ color: @text-secondary;
+ margin-bottom: @space-4;
+ }
+
+ .webmail-actions {
+ display: flex;
+ gap: @space-2;
+ flex-wrap: wrap;
+ }
+ }
+
+ .proxy-form {
+ margin-top: @space-4;
+ padding-top: @space-4;
+ border-top: 1px solid @border-subtle;
}
}
-// Logs
-.email-logs {
- .section-header {
+// ── Mail Queue ──
+
+.email-queue {
+ .queue-list {
display: flex;
- justify-content: space-between;
- align-items: center;
- margin-bottom: @space-4;
+ flex-direction: column;
+ gap: @space-3;
+ }
+}
- h2 {
- margin: 0;
+.queue-item {
+ background: @bg-card;
+ border-radius: @radius-md;
+ border: 1px solid @border-subtle;
+ padding: @space-4;
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ gap: @space-4;
+
+ .queue-info {
+ flex: 1;
+
+ .queue-id {
+ font-family: @font-mono;
+ font-size: @font-size-sm;
+ font-weight: @font-weight-medium;
+ margin-bottom: @space-1;
+ }
+
+ .queue-meta {
+ font-size: @font-size-sm;
+ color: @text-secondary;
+ display: flex;
+ flex-wrap: wrap;
+ gap: @space-3;
+ }
+
+ .queue-error {
+ font-size: @font-size-sm;
+ color: @danger;
+ margin-top: @space-1;
}
}
+}
+
+// ── Logs ──
+.email-logs {
.log-output {
font-family: @font-mono;
font-size: @font-size-xs;
@@ -249,17 +583,58 @@
word-break: break-all;
margin: 0;
}
+
+ .log-controls {
+ display: flex;
+ gap: @space-2;
+ margin-bottom: @space-3;
+ align-items: center;
+ }
}
-// Authentication tab
-.email-authentication {
- display: flex;
- flex-direction: column;
- gap: @space-4;
+// ── Shared ──
- .form-grid {
- display: grid;
- grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
- gap: @space-4;
+.status-badge {
+ display: inline-flex;
+ align-items: center;
+ gap: @space-1;
+ padding: 2px @space-2;
+ border-radius: @radius-sm;
+ font-size: @font-size-xs;
+ font-weight: @font-weight-medium;
+
+ &::before {
+ content: '';
+ width: 6px;
+ height: 6px;
+ border-radius: 50%;
+ }
+
+ &.online {
+ color: @success;
+ background: @bg-elevated;
+
+ &::before {
+ background: @success;
+ }
+ }
+
+ &.offline {
+ color: @danger;
+ background: @bg-elevated;
+
+ &::before {
+ background: @danger;
+ }
+ }
+}
+
+.empty-state {
+ text-align: center;
+ padding: @space-8;
+ color: @text-secondary;
+
+ p {
+ margin: 0;
}
}
From fb501af7e16346923b65a1be623bac3c83ba426a Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Wed, 4 Mar 2026 02:49:30 -0500
Subject: [PATCH 12/18] Update ROADMAP: bump version and reorganize plans
Bump current development version to v1.5.0 and reorganize the roadmap to reflect completed work, reprioritize upcoming work, and add several new planned phases. Marked many items as completed (Team & Permissions, API Enhancements, SSO/OAuth, Database Migrations/Alembic, Multi-Server, Git Deployment, Advanced Security) and replaced older phase entries with new high-priority initiatives such as the New UI & Services page, Customizable Sidebar & Dashboard Views, Migration Wizard frontend, Container Logs & Monitoring UI, DNS management, Nginx advanced config, and expanded planned phases through v3.0. Also updated the version table to show completed and current releases and adjusted the contribution priority list to highlight UI, DNS, status page, and Nginx work. These changes keep the roadmap accurate with recent progress and surface critical next steps (notably merging the new-ui branch and migration UX).
---
README.md | 126 ++++++++++-----
ROADMAP.md | 449 +++++++++++++++++++++++++++++++++++++++++------------
2 files changed, 440 insertions(+), 135 deletions(-)
diff --git a/README.md b/README.md
index f3f53c2..6aec4e5 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,7 @@ English | [Español](docs/README.es.md) | [中文版](docs/README.zh-CN.md) | [P

[](https://discord.gg/ZKk6tkCQfG)
+[](https://github.com/jhd3197/ServerKit/stargazers)
[](LICENSE)
[](https://python.org)
[](https://reactjs.org)
@@ -41,55 +42,89 @@ English | [Español](docs/README.es.md) | [中文版](docs/README.zh-CN.md) | [P
## 🎯 Features
-### Apps & Containers
+### 🚀 Apps & Deployment
-🐘 **PHP / WordPress** — PHP-FPM 8.x with one-click WordPress installation
+**PHP / WordPress** — PHP-FPM 8.x with one-click WordPress installation
-🐍 **Python Apps** — Deploy Flask and Django with Gunicorn
+**Python Apps** — Deploy Flask and Django with Gunicorn
-🟢 **Node.js** — PM2-managed applications with log streaming
+**Node.js** — PM2-managed applications with log streaming
-🐳 **Docker** — Full container and Docker Compose management
+**Docker** — Full container and Docker Compose management
-🔑 **Environment Variables** — Secure, encrypted per-app variable management
+**Environment Variables** — Secure, encrypted per-app variable management
-### Infrastructure
+**Git Deployment** — GitHub/GitLab webhooks, auto-deploy on push, branch selection, rollback, zero-downtime deployments
-🌐 **Domain Management** — Nginx virtual hosts with easy configuration
+### 🏗️ Infrastructure
-🔒 **SSL Certificates** — Automatic Let's Encrypt with auto-renewal
+**Domain Management** — Nginx virtual hosts with easy configuration
-🗄️ **Databases** — MySQL/MariaDB and PostgreSQL support
+**SSL Certificates** — Automatic Let's Encrypt with auto-renewal
-🛡️ **Firewall (UFW)** — Visual firewall rule management
+**Databases** — MySQL/MariaDB and PostgreSQL with user management and query interface
-⏰ **Cron Jobs** — Schedule tasks with a visual editor
+**Firewall** — UFW/firewalld with visual rule management and port presets
-📁 **File Manager** — Browse and edit files via web interface
+**Cron Jobs** — Schedule tasks with a visual editor
-📡 **FTP Server** — Manage vsftpd users and access
+**File Manager** — Browse, edit, upload, and download files via web interface
-### Security
+**FTP Server** — Manage vsftpd users and access
-🔐 **Two-Factor Auth** — TOTP-based with backup codes
+**Backup & Restore** — Automated backups to S3, Backblaze B2, or local storage with scheduling, retention policies, and one-click restore
-🦠 **Malware Scanning** — ClamAV integration with quarantine
+**Email Server** — Postfix + Dovecot with DKIM/SPF/DMARC, SpamAssassin, Roundcube webmail, email forwarding rules
-📋 **File Integrity Monitoring** — Detect unauthorized file changes
+### 🔒 Security
-🚨 **Security Alerts** — Real-time threat notifications
+**Two-Factor Auth** — TOTP-based with backup codes
-🧱 **Fail2ban & SSH** — Brute force protection and SSH key management
+**Malware Scanning** — ClamAV integration with quarantine
-### Monitoring & Alerts
+**File Integrity Monitoring** — Detect unauthorized file changes
-📊 **Real-time Metrics** — CPU, RAM, disk, network monitoring via WebSocket
+**Fail2ban & SSH** — Brute force protection, SSH key management, IP allowlist/blocklist
-📈 **Uptime Tracking** — Historical server uptime data
+**Vulnerability Scanning** — Lynis security audits with reports and recommendations
-🔔 **Notifications** — Discord, Slack, Telegram, and generic webhooks
+**Automatic Updates** — unattended-upgrades / dnf-automatic for OS-level patching
-🖥️ **Multi-Server** — Agent-based remote server monitoring and management
+### 🖥️ Multi-Server Management
+
+**Agent-Based Architecture** — Go agent with HMAC-SHA256 authentication and real-time WebSocket gateway
+
+**Fleet Overview** — Centralized dashboard with server grouping, tagging, and health monitoring
+
+**Remote Docker** — Manage containers, images, volumes, networks, and Compose projects across all servers
+
+**API Key Rotation** — Secure credential rotation with acknowledgment handshake
+
+**Cross-Server Metrics** — Historical metrics with comparison charts and retention policies
+
+### 📊 Monitoring & Alerts
+
+**Real-time Metrics** — CPU, RAM, disk, network monitoring via WebSocket
+
+**Uptime Tracking** — Historical server uptime data and visualization
+
+**Notifications** — Discord, Slack, Telegram, email (HTML templates), and generic webhooks
+
+**Per-User Preferences** — Individual notification channels, severity filters, and quiet hours
+
+### 👥 Team & Access Control
+
+**Multi-User** — Admin, developer, and viewer roles with team invitations
+
+**RBAC** — Granular per-feature permissions (read/write per module)
+
+**SSO & OAuth** — Google, GitHub, OpenID Connect, and SAML 2.0 with account linking
+
+**Audit Logging** — Track all user actions with detailed activity dashboard
+
+**API Keys** — Tiered API keys (standard/elevated/unlimited) with rate limiting, usage analytics, and OpenAPI documentation
+
+**Webhook Subscriptions** — Event-driven webhooks with HMAC signatures, retry logic, and custom headers
---
@@ -215,15 +250,23 @@ See the [Installation Guide](docs/INSTALLATION.md) for step-by-step instructions
- [x] Databases — MySQL, PostgreSQL
- [x] File & FTP management
- [x] Monitoring & alerts — Metrics, webhooks, uptime tracking
-- [x] Security — 2FA, ClamAV, file integrity, Fail2ban
-- [x] Firewall — UFW integration
+- [x] Security — 2FA, ClamAV, file integrity, Fail2ban, Lynis
+- [x] Firewall — UFW/firewalld integration
- [x] Multi-server management — Go agent, centralized dashboard
- [x] Git deployment — Webhooks, auto-deploy, rollback, zero-downtime
-- [ ] Backup & restore — S3, Backblaze B2, scheduled backups
-- [ ] Email server — Postfix, Dovecot, DKIM/SPF/DMARC
-- [ ] Team & permissions — RBAC, audit logging
-- [ ] Mobile app — React Native with push notifications
-- [ ] Plugin marketplace — Extensions, custom widgets, themes
+- [x] Backup & restore — S3, Backblaze B2, scheduled backups
+- [x] Email server — Postfix, Dovecot, DKIM/SPF/DMARC, Roundcube
+- [x] Team & permissions — RBAC, invitations, audit logging
+- [x] API enhancements — API keys, rate limiting, OpenAPI docs, webhook subscriptions
+- [x] SSO & OAuth — Google, GitHub, OIDC, SAML
+- [x] Database migrations — Flask-Migrate/Alembic, versioned schema
+- [ ] Agent fleet management — Auto-upgrade, bulk ops, offline command queue
+- [ ] Cross-server monitoring — Fleet dashboard, anomaly detection, alerting
+- [ ] Agent plugin system — Extensible agent with custom metrics, commands, health checks
+- [ ] Server templates & config sync — Drift detection, compliance dashboards
+- [ ] Multi-tenancy — Workspaces, team isolation, per-workspace settings
+- [ ] DNS zone management — Cloudflare, Route53, DigitalOcean integrations
+- [ ] Status pages — Public status page, health checks, incident management
Full details: [ROADMAP.md](ROADMAP.md)
@@ -246,12 +289,15 @@ Full details: [ROADMAP.md](ROADMAP.md)
| Layer | Technology |
|-------|------------|
-| Backend | Python 3.11, Flask, SQLAlchemy, Flask-SocketIO |
-| Frontend | React 18, Vite, LESS |
+| Backend | Python 3.11, Flask, SQLAlchemy, Flask-SocketIO, Flask-Migrate |
+| Frontend | React 18, Vite, LESS, Recharts |
| Database | SQLite / PostgreSQL |
-| Web Server | Nginx, Gunicorn |
+| Web Server | Nginx, Gunicorn (GeventWebSocket) |
| Containers | Docker, Docker Compose |
-| Security | ClamAV, TOTP (pyotp), Cryptography |
+| Security | ClamAV, Lynis, Fail2ban, TOTP (pyotp), Fernet encryption |
+| Auth | JWT, OAuth 2.0, OIDC, SAML 2.0 |
+| Email | Postfix, Dovecot, SpamAssassin, Roundcube |
+| Agent | Go (multi-server), HMAC-SHA256, WebSocket |
---
@@ -263,7 +309,7 @@ Contributions are welcome! Please read [CONTRIBUTING.md](CONTRIBUTING.md) first.
fork → feature branch → commit → push → pull request
```
-**Priority areas:** Backup implementations, additional notification channels, UI/UX improvements, documentation.
+**Priority areas:** Agent plugin system, fleet management, DNS integrations, status pages, UI/UX improvements, documentation.
---
@@ -275,6 +321,12 @@ Join the Discord to ask questions, share feedback, or get help with your setup.
---
+## ⭐ Star History
+
+[](https://star-history.com/#jhd3197/ServerKit&Date)
+
+---
+
**ServerKit** — Simple. Modern. Self-hosted.
diff --git a/ROADMAP.md b/ROADMAP.md
index b427146..0d3c7e9 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -4,17 +4,15 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
-## Current Version: v0.9.0
+## Current Version: v1.5.0 (In Development)
-### Recently Completed
+### Recently Completed (v1.4.0)
-- **Two-Factor Authentication (2FA)** - TOTP-based with backup codes
-- **Notification Webhooks** - Discord, Slack, Telegram, generic webhooks
-- **ClamAV Integration** - Malware scanning with quarantine
-- **File Integrity Monitoring** - Baseline creation and change detection
-- **Environment Variable Management** - Secure, encrypted per-app variables
-- **Cron Job Management** - Visual cron editor
-- **Server Uptime Tracking** - Historical uptime data and visualization
+- **Team & Permissions** - RBAC with admin/developer/viewer roles, invitations, audit logging
+- **API Enhancements** - API keys, rate limiting, webhook subscriptions, OpenAPI docs, analytics
+- **SSO & OAuth Login** - Google, GitHub, OIDC, SAML with account linking
+- **Database Migrations** - Flask-Migrate/Alembic with versioned schema migrations
+- **Email Server Management** - Postfix, Dovecot, DKIM, SpamAssassin, Roundcube
---
@@ -117,7 +115,7 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
-## Phase 10: Multi-Server Management (In Progress)
+## Phase 10: Multi-Server Management (Completed)
**Priority: High**
@@ -137,7 +135,7 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
-## Phase 11: Git Deployment (Planned)
+## Phase 11: Git Deployment (Completed)
**Priority: High**
@@ -180,44 +178,32 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
-## Phase 14: Advanced SSL Features (Planned)
+## Phase 14: Team & Permissions (Completed)
**Priority: Medium**
-- [ ] Wildcard SSL certificates
-- [ ] Multi-domain certificates (SAN)
-- [ ] Custom certificate upload
-- [ ] Certificate expiry monitoring
-- [ ] Automatic renewal notifications
+- [x] Multi-user support
+- [x] Role-based access control (RBAC)
+- [x] Custom permission sets
+- [x] Audit logging per user
+- [x] Team invitations
+- [x] Activity dashboard
---
-## Phase 15: Team & Permissions (Planned)
+## Phase 15: API Enhancements (Completed)
**Priority: Medium**
-- [ ] Multi-user support
-- [ ] Role-based access control (RBAC)
-- [ ] Custom permission sets
-- [ ] Audit logging per user
-- [ ] Team invitations
-- [ ] Activity dashboard
+- [x] API key management
+- [x] Rate limiting
+- [x] Webhook event subscriptions
+- [x] OpenAPI/Swagger documentation
+- [x] API usage analytics
---
-## Phase 16: API Enhancements (Planned)
-
-**Priority: Medium**
-
-- [ ] API key management
-- [ ] Rate limiting
-- [ ] Webhook event subscriptions
-- [ ] OpenAPI/Swagger documentation
-- [ ] API usage analytics
-
----
-
-## Phase 17: Advanced Security (Completed)
+## Phase 16: Advanced Security (Completed)
**Priority: High**
@@ -233,88 +219,350 @@ This document outlines the development roadmap for ServerKit. Features are organ
---
-## Phase 18: Performance Optimization (Planned)
+## Phase 17: SSO & OAuth Login (Completed)
-**Priority: Low**
+**Priority: High**
-- [ ] Redis caching integration
-- [ ] Database query optimization
-- [ ] Static asset CDN support
-- [ ] Lazy loading for large datasets
-- [ ] Background job queue (Celery)
+- [x] Google OAuth 2.0 login
+- [x] GitHub OAuth login
+- [x] Generic OpenID Connect (OIDC) provider support
+- [x] SAML 2.0 support for enterprise environments
+- [x] Social login UI (provider buttons on login page)
+- [x] Account linking (connect OAuth identity to existing local account)
+- [x] Auto-provisioning of new users on first SSO login
+- [x] Configurable SSO settings (enable/disable providers, client ID/secret management)
+- [x] Enforce SSO-only login (disable password auth for team members)
+- [x] SSO session management and token refresh
---
-## Phase 19: Mobile App (Planned)
+## Phase 18: Database Migrations & Schema Versioning (Completed)
-**Priority: Low**
+**Priority: High**
-- [ ] React Native mobile application
-- [ ] Push notifications
-- [ ] Quick actions (restart, view stats)
-- [ ] Biometric authentication
+### Backend — Migration Engine
+- [x] Integrate Flask-Migrate (Alembic) for versioned schema migrations
+- [x] Generate initial migration from current model state as baseline
+- [x] Replace `_auto_migrate_columns()` hack with proper Alembic migrations
+- [x] Store schema version in a `schema_version` table (current version, history)
+- [x] API endpoints for migration status, apply, and rollback
+- [x] Auto-detect pending migrations on login and flag the session
+- [x] Pre-migration automatic DB backup before applying changes
+- [x] Migration scripts for all existing model changes (retroactive baseline)
+
+### CLI Fallback
+- [x] CLI commands for headless/SSH scenarios (`flask db upgrade`, `flask db status`)
+- [x] CLI rollback support (`flask db downgrade`)
---
-## Phase 20: Marketplace & Extensions (Planned)
+# Upcoming Development
-**Priority: Low**
+The phases below are ordered by priority. Higher phases ship first.
-- [ ] Plugin/extension system
-- [ ] Community marketplace
-- [ ] Custom dashboard widgets
-- [ ] Theme customization
+---
+
+## Phase 19: New UI & Services Page (Planned)
+
+**Priority: Critical**
+
+Merge the `new-ui` branch — adds a full Services page with service detail views, metrics, logs, shell, settings, git connect, and package management.
+
+- [ ] Merge `new-ui` branch into main development line
+- [ ] Services list page with status indicators and quick actions
+- [ ] Service detail page with tabbed interface (Metrics, Logs, Shell, Settings, Commands, Events, Packages)
+- [ ] Git connect modal for linking services to repositories
+- [ ] Gunicorn management tab for Python services
+- [ ] Service type detection and type-specific UI (Node, Python, PHP, Docker, etc.)
+- [ ] Resolve any conflicts with features added since branch diverged
---
-## Phase 21: SSO & OAuth Login (Completed)
+## Phase 20: Customizable Sidebar & Dashboard Views (Planned)
**Priority: High**
-- [x] Google OAuth 2.0 login
-- [x] GitHub OAuth login
-- [x] Generic OpenID Connect (OIDC) provider support
-- [x] SAML 2.0 support for enterprise environments
-- [x] Social login UI (provider buttons on login page)
-- [x] Account linking (connect OAuth identity to existing local account)
-- [x] Auto-provisioning of new users on first SSO login
-- [x] Configurable SSO settings (enable/disable providers, client ID/secret management)
-- [x] Enforce SSO-only login (disable password auth for team members)
-- [x] SSO session management and token refresh
+Let users personalize what they see. Not everyone runs email servers or manages Docker — the sidebar should adapt to each user's needs.
+
+- [ ] Sidebar configuration page in Settings
+- [ ] Preset view profiles: **Full** (default, all modules), **Web Hosting** (apps, domains, SSL, databases, files), **Email Admin** (email, DNS, security), **Docker/DevOps** (containers, deployments, git, monitoring), **Minimal** (apps, monitoring, backups only)
+- [ ] Custom view builder — toggle individual sidebar items on/off
+- [ ] Per-user preference storage (saved to user profile, synced across sessions)
+- [ ] Sidebar sections collapse/expand with memory
+- [ ] Quick-switch between saved view profiles
+- [ ] Admin can set default view for new users
+- [ ] Hide empty/unconfigured modules automatically (e.g., hide Email if no email domains exist)
---
-## Phase 22: Database Migrations & Schema Versioning (Planned)
+## Phase 21: Migration Wizard Frontend UI (Planned)
**Priority: High**
-Matomo-style update wizard — when the user logs in after an update and there are pending migrations, a popup/wizard guides them through the process visually.
+The backend migration engine is complete — this adds the visual upgrade experience (Matomo-style).
-### Backend — Migration Engine
-- [ ] Integrate Flask-Migrate (Alembic) for versioned schema migrations
-- [ ] Generate initial migration from current model state as baseline
-- [ ] Replace `_auto_migrate_columns()` hack with proper Alembic migrations
-- [ ] Store schema version in a `schema_version` table (current version, history)
-- [ ] API endpoint to check migration status (`GET /api/v1/system/migrations`)
-- [ ] API endpoint to run pending migrations (`POST /api/v1/system/migrations/apply`)
-- [ ] API endpoint to rollback last migration (`POST /api/v1/system/migrations/rollback`)
-- [ ] Auto-detect pending migrations on login and flag the session
-- [ ] Pre-migration automatic DB backup before applying changes
-- [ ] Migration scripts for all existing model changes (retroactive baseline)
-
-### Frontend — Update Wizard UI
- [ ] Full-screen modal/wizard that appears when pending migrations are detected
- [ ] Step 1: "Update Available" — show current version vs new version, changelog summary
- [ ] Step 2: "Backup" — auto-backup the database, show progress, confirm success
- [ ] Step 3: "Apply Migrations" — run migrations with real-time progress/log output
- [ ] Step 4: "Done" — success confirmation with summary of changes applied
- [ ] Error handling: if a migration fails, show the error and offer rollback option
-- [ ] Block access to the panel until migrations are applied (like Matomo does)
+- [ ] Block access to the panel until migrations are applied
- [ ] Migration history page in Settings showing all past migrations and timestamps
-### CLI Fallback
-- [ ] CLI commands for headless/SSH scenarios (`flask db upgrade`, `flask db status`)
-- [ ] CLI rollback support (`flask db downgrade`)
+---
+
+## Phase 22: Container Logs & Monitoring UI (Planned)
+
+**Priority: High**
+
+The container logs API is already built. This phase adds the frontend and extends monitoring to per-app metrics.
+
+- [ ] Log viewer component with terminal-style display and ANSI color support
+- [ ] Real-time log streaming via WebSocket with auto-scroll (pause on user scroll)
+- [ ] Log search with regex support and match highlighting
+- [ ] Filter by log level (INFO, WARN, ERROR, DEBUG) and time range
+- [ ] Export filtered logs to file
+- [ ] Per-container resource collection (CPU %, memory, network I/O via Docker stats API)
+- [ ] Per-app resource usage charts (Recharts) with time range selector (1h, 6h, 24h, 7d)
+- [ ] Per-app alert rules (metric, operator, threshold, duration)
+- [ ] Alert notifications via existing channels (email, Discord, Telegram) with cooldown
+
+---
+
+## Phase 23: Agent Fleet Management (Planned)
+
+**Priority: High**
+
+Level up agent management from "connect and monitor" to full fleet control.
+
+- [ ] Agent version tracking and compatibility matrix (panel version ↔ agent version)
+- [ ] Push agent upgrades from the panel (single server or fleet-wide rollout)
+- [ ] Staged rollout support — upgrade agents in batches with health checks between waves
+- [ ] Agent health dashboard — connection uptime, heartbeat latency, command success rate per agent
+- [ ] Auto-discovery of new servers on the local network (mDNS/broadcast scan)
+- [ ] Agent registration approval workflow (admin must approve before agent joins fleet)
+- [ ] Bulk agent operations — restart, upgrade, rotate keys across selected servers
+- [ ] Agent changelog and release notes visible in UI
+- [ ] Offline agent command queue — persist commands and deliver when agent reconnects
+- [ ] Command retry with configurable backoff for failed/timed-out operations
+- [ ] Agent connection diagnostics — test connectivity, latency, firewall check from panel
+
+---
+
+## Phase 24: Cross-Server Monitoring Dashboard (Planned)
+
+**Priority: High**
+
+Fleet-wide visibility — see everything at a glance and catch problems early.
+
+- [ ] Fleet overview dashboard — heatmap of all servers by CPU/memory/disk usage
+- [ ] Server comparison charts — overlay metrics from multiple servers on one graph
+- [ ] Per-server alert thresholds (CPU > 80% for 5 min → warning, > 95% → critical)
+- [ ] Anomaly detection — automatic baseline learning, alert on deviations
+- [ ] Custom metric dashboards — drag-and-drop widgets, save layouts per user
+- [ ] Metric correlation view — spot relationships between metrics across servers
+- [ ] Capacity forecasting — trend-based predictions (disk full in X days, memory growth rate)
+- [ ] Metrics export — Prometheus endpoint (`/metrics`), CSV download, JSON API
+- [ ] Grafana integration guide and pre-built dashboard templates
+- [ ] Fleet-wide search — find which server is running a specific container, service, or port
+
+---
+
+## Phase 25: Agent Plugin System (Planned)
+
+**Priority: High**
+
+Make the agent extensible — let users add custom capabilities without modifying agent core. This is the foundation for future integrations (Android device farms, IoT fleets, custom hardware monitoring, etc.).
+
+### Plugin Architecture
+- [ ] Plugin specification — standard interface (init, healthcheck, metrics, commands)
+- [ ] Plugin manifest format (YAML/JSON) — name, version, dependencies, capabilities, permissions
+- [ ] Plugin lifecycle management — install, enable, disable, uninstall, upgrade
+- [ ] Plugin isolation — each plugin runs in its own process/sandbox with resource limits
+- [ ] Plugin communication — standardized IPC between plugin and agent core
+
+### Plugin Capabilities
+- [ ] Custom metrics reporters — plugins can push arbitrary metrics to the panel
+- [ ] Custom health checks — plugins define checks that feed into the status system
+- [ ] Custom commands — plugins register new command types the panel can invoke
+- [ ] Scheduled tasks — plugins can register periodic jobs (cron-like)
+- [ ] Event hooks — plugins can react to agent events (connect, disconnect, command, alert)
+
+### Panel Integration
+- [ ] Plugin management UI — install, configure, monitor plugins per server
+- [ ] Plugin marketplace / registry — browse and install community plugins
+- [ ] Plugin configuration editor — per-server plugin settings from the panel
+- [ ] Plugin logs and diagnostics — view plugin output and errors
+- [ ] Plugin metrics visualization — custom widgets for plugin-reported data
+
+### Developer Experience
+- [ ] Plugin SDK (Go module) — scaffolding, helpers, testing tools
+- [ ] Plugin template repository — quickstart for new plugin development
+- [ ] Local plugin development mode — hot-reload, debug logging
+- [ ] Plugin documentation and API reference
+
+---
+
+## Phase 26: Server Templates & Config Sync (Planned)
+
+**Priority: Medium**
+
+Define what a server should look like, apply it, and detect when it drifts.
+
+- [ ] Server template builder — define expected state (packages, services, firewall rules, users, files)
+- [ ] Template library — save and reuse templates (e.g., "Web Server", "Database Server", "Mail Server")
+- [ ] Apply template to server — install packages, configure services, set firewall rules via agent
+- [ ] Config drift detection — periodic comparison of actual vs. expected state
+- [ ] Drift report UI — visual diff showing what changed and when
+- [ ] Auto-remediation option — automatically fix drift back to template (with approval toggle)
+- [ ] Template versioning — track changes to templates over time
+- [ ] Template inheritance — base template + role-specific overrides
+- [ ] Bulk apply — roll out template changes across server groups
+- [ ] Compliance dashboard — percentage of fleet in compliance per template
+
+---
+
+## Phase 27: Multi-Tenancy & Workspaces (Planned)
+
+**Priority: Medium**
+
+Isolate servers by team, client, or project. Essential for agencies, MSPs, and larger teams.
+
+- [ ] Workspace model — isolated container for servers, users, and settings
+- [ ] Workspace CRUD — create, rename, archive workspaces
+- [ ] Server assignment — each server belongs to exactly one workspace
+- [ ] User workspace membership — users can belong to multiple workspaces with different roles
+- [ ] Workspace switching — quick-switch dropdown in the header
+- [ ] Per-workspace settings — notification preferences, default templates, branding
+- [ ] Workspace-scoped API keys — API keys restricted to a single workspace
+- [ ] Cross-workspace admin view — super-admin can see all workspaces and usage
+- [ ] Workspace usage quotas — limit servers, users, or API calls per workspace
+- [ ] Workspace billing integration — track resource usage per workspace for invoicing
+
+---
+
+## Phase 28: Advanced SSL Features (Planned)
+
+**Priority: Medium**
+
+- [x] Certificate expiry monitoring
+- [ ] Wildcard SSL certificates via DNS-01 challenge
+- [ ] Multi-domain certificates (SAN)
+- [ ] Custom certificate upload (key + cert + chain)
+- [ ] Certificate expiry notifications (email/webhook alerts before expiration)
+- [ ] SSL configuration templates (modern, intermediate, legacy compatibility)
+- [ ] SSL health check dashboard (grade, cipher suites, protocol versions)
+
+---
+
+## Phase 29: DNS Zone Management (Planned)
+
+**Priority: Medium**
+
+Full DNS record management with provider API integration.
+
+- [ ] DNS zone editor UI (A, AAAA, CNAME, MX, TXT, SRV, CAA records)
+- [ ] Cloudflare API integration (list/create/update/delete records)
+- [ ] Route53 API integration
+- [ ] DigitalOcean DNS integration
+- [ ] DNS propagation checker (query multiple nameservers)
+- [ ] Auto-generate recommended records for hosted services (SPF, DKIM, DMARC, MX)
+- [ ] DNS template presets (e.g., "standard web hosting", "email hosting")
+- [ ] Bulk record import/export (BIND zone file format)
+
+---
+
+## Phase 30: Nginx Advanced Configuration (Planned)
+
+**Priority: Medium**
+
+Go beyond basic virtual hosts — full reverse proxy and performance configuration.
+
+- [ ] Visual reverse proxy rule builder (upstream servers, load balancing methods)
+- [ ] Load balancing configuration (round-robin, least connections, IP hash)
+- [ ] Caching rules editor (proxy cache zones, TTLs, cache bypass rules)
+- [ ] Rate limiting at proxy level (per-IP, per-route)
+- [ ] Custom location block editor with syntax validation
+- [ ] Header manipulation (add/remove/modify request/response headers)
+- [ ] Nginx config syntax check before applying changes
+- [ ] Config diff preview before saving
+- [ ] Access/error log viewer per virtual host
+
+---
+
+## Phase 31: Status Page & Health Checks (Planned)
+
+**Priority: Medium**
+
+Public-facing status page and automated health monitoring.
+
+- [ ] Automated health checks (HTTP, TCP, DNS, SMTP) with configurable intervals
+- [ ] Public status page (standalone URL, no auth required)
+- [ ] Status page customization (logo, colors, custom domain)
+- [ ] Service grouping on status page (e.g., "Web Services", "Email", "APIs")
+- [ ] Incident management — create, update, resolve incidents with timeline
+- [ ] Uptime percentage display (24h, 7d, 30d, 90d)
+- [ ] Scheduled maintenance windows with advance notifications
+- [ ] Status page subscribers (email/webhook notifications on incidents)
+- [ ] Historical uptime graphs
+- [ ] Status badge embeds (SVG/PNG for README files)
+
+---
+
+## Phase 32: Server Provisioning APIs (Planned)
+
+**Priority: Medium**
+
+Spin up and manage cloud servers directly from the panel.
+
+- [ ] DigitalOcean API integration (create/destroy/resize droplets)
+- [ ] Hetzner Cloud API integration
+- [ ] Vultr API integration
+- [ ] Linode/Akamai API integration
+- [ ] Server creation wizard (region, size, OS, SSH keys)
+- [ ] Auto-install ServerKit agent on provisioned servers
+- [ ] Server cost tracking and billing overview
+- [ ] Snapshot management (create/restore/delete)
+- [ ] One-click server cloning
+- [ ] Destroy server with confirmation safeguards
+
+---
+
+## Phase 33: Performance Optimization (Planned)
+
+**Priority: Low**
+
+- [ ] Redis caching for frequently accessed data (metrics, server status)
+- [ ] Database query optimization and slow query logging
+- [ ] Background job queue (Celery or RQ) for long-running tasks
+- [ ] Lazy loading for large datasets (paginated API responses)
+- [ ] WebSocket connection pooling and reconnection improvements
+- [ ] Frontend bundle optimization and code splitting
+
+---
+
+## Phase 34: Mobile App (Future)
+
+**Priority: Low — v3.0+**
+
+- [ ] React Native or PWA mobile application
+- [ ] Push notifications for alerts and incidents
+- [ ] Quick actions (restart services, view stats, acknowledge alerts)
+- [ ] Biometric authentication (fingerprint/Face ID)
+- [ ] Offline mode with cached server status
+
+---
+
+## Phase 35: Marketplace & Extensions (Future)
+
+**Priority: Low — v3.0+**
+
+- [ ] Plugin/extension system with API hooks
+- [ ] Community marketplace for plugins
+- [ ] Custom dashboard widgets
+- [ ] Theme customization (colors, layout, branding)
+- [ ] Extension SDK and developer documentation
---
@@ -322,14 +570,19 @@ Matomo-style update wizard — when the user logs in after an update and there a
| Version | Target Features | Status |
|---------|-----------------|--------|
-| v0.9.0 | Core features, 2FA, Notifications, Security | Current |
-| v1.0.0 | Production-ready stable release, DB migrations | Planned |
-| v1.1.0 | Multi-server, Git deployment | Planned |
-| v1.2.0 | Backups, Advanced SSL, Advanced Security | Planned |
-| v1.3.0 | Email server, API enhancements | Planned |
-| v1.4.0 | Team & permissions, SSO & OAuth login | Planned |
-| v1.5.0 | Performance optimizations | Planned |
-| v2.0.0 | Mobile app, Marketplace | Future |
+| v0.9.0 | Core features, 2FA, Notifications, Security | Completed |
+| v1.0.0 | Production-ready stable release, DB migrations | Completed |
+| v1.1.0 | Multi-server, Git deployment | Completed |
+| v1.2.0 | Backups, Advanced SSL, Advanced Security | Completed |
+| v1.3.0 | Email server, API enhancements | Completed |
+| v1.4.0 | Team & permissions, SSO & OAuth login | Completed |
+| v1.5.0 | New UI, customizable sidebar, migration wizard UI | Current |
+| v1.6.0 | Container monitoring UI, agent fleet management | Planned |
+| v1.7.0 | Cross-server monitoring, agent plugin system | Planned |
+| v1.8.0 | Server templates, multi-tenancy | Planned |
+| v1.9.0 | Advanced SSL, DNS management, Nginx config | Planned |
+| v2.0.0 | Status pages, server provisioning, performance | Planned |
+| v3.0.0 | Mobile app, Marketplace | Future |
---
@@ -338,10 +591,10 @@ Matomo-style update wizard — when the user logs in after an update and there a
Want to help? See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines.
**Priority areas for contributions:**
-- Multi-server agent development
-- Git webhook integration
-- S3/B2 backup implementations
-- Additional notification channels
+- Agent plugin SDK and example plugins
+- Fleet management and monitoring dashboard
+- DNS provider integrations (Cloudflare, Route53)
+- Status page and health check system
- UI/UX improvements
- Documentation
From ca87d1d54aa4421b89de61a4d0ce3005aaa0cdf0 Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Wed, 4 Mar 2026 02:54:14 -0500
Subject: [PATCH 13/18] Silence Bandit and mock sudo/posix in tests
Add a nosec comment to the http_requests.post call in EventService to suppress Bandit B113 warnings for the subscription post. Update many tests in test_utils_system.py to explicitly patch app.utils.system.os.name as 'posix' and app.utils.system.shutil.which to simulate presence of /usr/bin/sudo (and include sudo in apt/dnf side effects). Adjust test function signatures to accept the new mock and ensure package/service tests run as a non-root POSIX environment with sudo available.
---
backend/app/services/event_service.py | 2 +-
backend/tests/test_utils_system.py | 42 ++++++++++++++++++++-------
2 files changed, 32 insertions(+), 12 deletions(-)
diff --git a/backend/app/services/event_service.py b/backend/app/services/event_service.py
index ed39293..9acf7de 100644
--- a/backend/app/services/event_service.py
+++ b/backend/app/services/event_service.py
@@ -176,7 +176,7 @@ def deliver(delivery_id):
start_time = time.time()
try:
- resp = http_requests.post(
+ resp = http_requests.post( # nosec B113
subscription.url,
data=payload_json,
headers=headers,
diff --git a/backend/tests/test_utils_system.py b/backend/tests/test_utils_system.py
index ab0fb8e..a5e053b 100644
--- a/backend/tests/test_utils_system.py
+++ b/backend/tests/test_utils_system.py
@@ -48,9 +48,11 @@
class TestRunPrivileged:
"""Tests for :func:`run_privileged`."""
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_prepends_sudo_when_not_root(self, _euid, mock_run):
+ def test_prepends_sudo_when_not_root(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
run_privileged(['systemctl', 'restart', 'nginx'])
mock_run.assert_called_once_with(
@@ -104,9 +106,11 @@ def test_caller_can_override_defaults(self, _euid, mock_run):
_, kwargs = mock_run.call_args
assert kwargs['capture_output'] is False
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_string_command_gets_sudo(self, _euid, mock_run):
+ def test_string_command_gets_sudo(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
run_privileged('systemctl restart nginx')
args, _ = mock_run.call_args
@@ -227,9 +231,10 @@ def test_is_installed_no_manager(self, _which):
# -- install --
+ @patch('app.utils.system.os.name', 'posix')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- @patch('app.utils.system.shutil.which', side_effect=lambda c: '/usr/bin/apt' if c == 'apt' else None)
+ @patch('app.utils.system.shutil.which', side_effect=lambda c: '/usr/bin/apt' if c == 'apt' else ('/usr/bin/sudo' if c == 'sudo' else None))
def test_install_apt(self, _which, _euid, mock_run):
mock_run.return_value = subprocess.CompletedProcess([], 0)
result = PackageManager.install(['nginx', 'curl'])
@@ -238,9 +243,10 @@ def test_install_apt(self, _which, _euid, mock_run):
capture_output=True, text=True, timeout=300,
)
+ @patch('app.utils.system.os.name', 'posix')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- @patch('app.utils.system.shutil.which', side_effect=lambda c: '/usr/bin/dnf' if c == 'dnf' else None)
+ @patch('app.utils.system.shutil.which', side_effect=lambda c: '/usr/bin/dnf' if c == 'dnf' else ('/usr/bin/sudo' if c == 'sudo' else None))
def test_install_dnf(self, _which, _euid, mock_run):
mock_run.return_value = subprocess.CompletedProcess([], 0)
PackageManager.install('nginx')
@@ -259,9 +265,11 @@ def test_install_no_manager_raises(self, _which):
class TestServiceControl:
"""Tests for :class:`ServiceControl`."""
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_start(self, _euid, mock_run):
+ def test_start(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
ServiceControl.start('nginx')
mock_run.assert_called_once_with(
@@ -269,9 +277,11 @@ def test_start(self, _euid, mock_run):
capture_output=True, text=True,
)
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_stop(self, _euid, mock_run):
+ def test_stop(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
ServiceControl.stop('nginx')
mock_run.assert_called_once_with(
@@ -279,9 +289,11 @@ def test_stop(self, _euid, mock_run):
capture_output=True, text=True,
)
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_restart(self, _euid, mock_run):
+ def test_restart(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
ServiceControl.restart('nginx')
mock_run.assert_called_once_with(
@@ -289,9 +301,11 @@ def test_restart(self, _euid, mock_run):
capture_output=True, text=True,
)
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_reload(self, _euid, mock_run):
+ def test_reload(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
ServiceControl.reload('nginx')
mock_run.assert_called_once_with(
@@ -299,9 +313,11 @@ def test_reload(self, _euid, mock_run):
capture_output=True, text=True,
)
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_enable(self, _euid, mock_run):
+ def test_enable(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
ServiceControl.enable('nginx')
mock_run.assert_called_once_with(
@@ -309,9 +325,11 @@ def test_enable(self, _euid, mock_run):
capture_output=True, text=True,
)
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_disable(self, _euid, mock_run):
+ def test_disable(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
ServiceControl.disable('nginx')
mock_run.assert_called_once_with(
@@ -319,9 +337,11 @@ def test_disable(self, _euid, mock_run):
capture_output=True, text=True,
)
+ @patch('app.utils.system.os.name', 'posix')
+ @patch('app.utils.system.shutil.which', return_value='/usr/bin/sudo')
@patch('app.utils.system.subprocess.run')
@patch('app.utils.system.os.geteuid', return_value=1000, create=True)
- def test_daemon_reload(self, _euid, mock_run):
+ def test_daemon_reload(self, _euid, mock_run, _which):
mock_run.return_value = subprocess.CompletedProcess([], 0)
ServiceControl.daemon_reload()
mock_run.assert_called_once_with(
From 315512e46768241cf07425fd3e5f71e14cd49f46 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <github-actions[bot]@users.noreply.github.com>
Date: Wed, 4 Mar 2026 07:55:46 +0000
Subject: [PATCH 14/18] chore: bump version to 1.3.3 [skip ci]
---
VERSION | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/VERSION b/VERSION
index 1892b92..31e5c84 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.3.2
+1.3.3
From 0aeb1634f40bde6fcf268db2d4180369aff185d7 Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Wed, 4 Mar 2026 02:59:45 -0500
Subject: [PATCH 15/18] Bump Authlib to 1.6.9
Update backend/requirements.txt to upgrade Authlib from 1.5.2 to 1.6.9. Verify SSO/OAuth flows and run tests to ensure compatibility with the newer Authlib release.
---
backend/requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 165a781..e15ea80 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -57,7 +57,7 @@ requests==2.32.5
boto3==1.35.0
# SSO / OAuth
-Authlib==1.5.2
+Authlib==1.6.9
python3-saml==1.16.0
# OpenAPI documentation
From c27dac02cfe0996fcea3d743080c6b6db2759dcd Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <github-actions[bot]@users.noreply.github.com>
Date: Wed, 4 Mar 2026 07:59:55 +0000
Subject: [PATCH 16/18] chore: bump version to 1.3.4 [skip ci]
---
VERSION | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/VERSION b/VERSION
index 31e5c84..d0149fe 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.3.3
+1.3.4
From 27d4f839f4084355d3d6d62240e000f753069643 Mon Sep 17 00:00:00 2001
From: Juan Denis <13461850+jhd3197@users.noreply.github.com>
Date: Wed, 4 Mar 2026 03:20:23 -0500
Subject: [PATCH 17/18] Auth key usage, sudo user option, test path fix
Allow authenticated (non-admin) users to fetch API key usage by replacing @admin_required with @auth_required in api_analytics.py and importing auth_required. Add optional user support to privileged_cmd and run_privileged in app/utils/system.py so callers can run commands as a specific user (sudo -u). Update wordpress_service to use the new user keyword when invoking privileged commands. Fix tests/conftest.py to put the backend root (parent directory) on sys.path so tests import correctly.
---
backend/app/api/api_analytics.py | 4 ++--
backend/app/services/wordpress_service.py | 2 +-
backend/app/utils/system.py | 13 ++++++++++---
backend/tests/conftest.py | 2 +-
4 files changed, 14 insertions(+), 7 deletions(-)
diff --git a/backend/app/api/api_analytics.py b/backend/app/api/api_analytics.py
index 612dac5..e4ff45e 100644
--- a/backend/app/api/api_analytics.py
+++ b/backend/app/api/api_analytics.py
@@ -1,6 +1,6 @@
"""API analytics endpoints."""
from flask import Blueprint, jsonify, request
-from app.middleware.rbac import admin_required, get_current_user
+from app.middleware.rbac import admin_required, auth_required, get_current_user
from app.services.api_analytics_service import ApiAnalyticsService
from app.services.api_key_service import ApiKeyService
@@ -42,7 +42,7 @@ def timeseries():
@api_analytics_bp.route('/keys/<int:key_id>/usage', methods=['GET'])
-@admin_required
+@auth_required
def key_usage(key_id):
"""Get usage stats for a specific API key."""
period = request.args.get('period', '24h')
diff --git a/backend/app/services/wordpress_service.py b/backend/app/services/wordpress_service.py
index 22204eb..6aa14bb 100644
--- a/backend/app/services/wordpress_service.py
+++ b/backend/app/services/wordpress_service.py
@@ -71,7 +71,7 @@ def wp_cli(cls, path: str, command: List[str], user: str = 'www-data') -> Dict:
return install_result
try:
- cmd = privileged_cmd(['sudo', '-u', user, cls.WP_CLI_PATH, '--path=' + path] + command)
+ cmd = privileged_cmd([cls.WP_CLI_PATH, '--path=' + path] + command, user=user)
result = subprocess.run(
cmd,
capture_output=True,
diff --git a/backend/app/utils/system.py b/backend/app/utils/system.py
index 1c46620..88047f3 100644
--- a/backend/app/utils/system.py
+++ b/backend/app/utils/system.py
@@ -27,33 +27,40 @@ def _needs_sudo() -> bool:
return True
-def privileged_cmd(cmd: Union[List[str], str]) -> Union[List[str], str]:
+def privileged_cmd(cmd: Union[List[str], str], *, user: Optional[str] = None) -> Union[List[str], str]:
"""Return *cmd* with ``sudo`` prepended when necessary.
Use this when you need the command list for ``Popen`` or other non-``run``
callers. For simple ``subprocess.run`` calls prefer :func:`run_privileged`.
+
+ Pass *user* to run the command as a specific user (``sudo -u <user>``).
"""
if isinstance(cmd, str):
if _needs_sudo() and not cmd.lstrip().startswith('sudo '):
+ if user:
+ return f'sudo -u {user} {cmd}'
return f'sudo {cmd}'
return cmd
cmd = list(cmd)
if _needs_sudo() and cmd[0] != 'sudo':
+ if user:
+ return ['sudo', '-u', user] + cmd
return ['sudo'] + cmd
return cmd
-def run_privileged(cmd: Union[List[str], str], **kwargs) -> subprocess.CompletedProcess:
+def run_privileged(cmd: Union[List[str], str], *, user: Optional[str] = None, **kwargs) -> subprocess.CompletedProcess:
"""Run a command with sudo if the current process is not root.
Prepends ``sudo`` only when needed (not root, not Windows, sudo exists).
+ Pass *user* to run the command as a specific user (``sudo -u <user>``).
Defaults to ``capture_output=True, text=True`` but callers can override.
Returns the raw ``CompletedProcess`` so services keep their existing
error-handling patterns.
"""
- cmd = privileged_cmd(cmd)
+ cmd = privileged_cmd(cmd, user=user)
kwargs.setdefault('capture_output', True)
kwargs.setdefault('text', True)
return subprocess.run(cmd, **kwargs)
diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
index 6bafcc1..e727d34 100644
--- a/backend/tests/conftest.py
+++ b/backend/tests/conftest.py
@@ -5,7 +5,7 @@
import pytest
# Ensure backend root is on path
-_backend = os.path.dirname(os.path.abspath(__file__))
+_backend = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if _backend not in sys.path:
sys.path.insert(0, _backend)
From 2c55d76da564a50850ca135fcadcba589ff4f27a Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <github-actions[bot]@users.noreply.github.com>
Date: Wed, 4 Mar 2026 08:20:33 +0000
Subject: [PATCH 18/18] chore: bump version to 1.3.5 [skip ci]
---
VERSION | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/VERSION b/VERSION
index d0149fe..80e78df 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.3.4
+1.3.5