diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..d0febe6f
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,13 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+version: 2
+updates:
+ - package-ecosystem: "npm"
+ directory: "/"
+ schedule:
+ interval: "monthly"
+ allow:
+ - dependency-name: "astro"
+ - dependency-name: "@astrojs/starlight"
diff --git a/.github/scripts/languagetool_reviewdog.py b/.github/scripts/languagetool_reviewdog.py
new file mode 100644
index 00000000..050dc37e
--- /dev/null
+++ b/.github/scripts/languagetool_reviewdog.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+import argparse
+import json
+import os
+import re
+import subprocess
+from typing import Dict, List, Optional, Set, Tuple
+
+import requests
+
+
+def sh(*args: str) -> str:
+    """Run *args* as a subprocess and return its stripped stdout.
+
+    Raises subprocess.CalledProcessError on a non-zero exit status.
+    """
+    return subprocess.check_output(args, text=True).strip()
+
+
+def offset_to_line_col(text: str, offset: int) -> Tuple[int, int]:
+    """Convert a 0-based character *offset* in *text* to 1-based (line, column)."""
+    # reviewdog wants 1-based line/column
+    line = text.count("\n", 0, offset) + 1
+    # rfind returns -1 when offset is on the first line, so (last_nl + 1) is 0 there.
+    last_nl = text.rfind("\n", 0, offset)
+    col = offset - (last_nl + 1) + 1
+    return line, col
+
+
+def normalize_word(s: str) -> str:
+    """Strip leading/trailing non-word characters and underscores, then lowercase.
+
+    Used to normalize a flagged token before looking it up in the custom dictionary.
+    """
+    s = re.sub(r"^[\W_]+|[\W_]+$", "", s, flags=re.UNICODE)
+    return s.lower()
+
+
+def load_dictionary(path: str) -> Set[str]:
+    """Load a newline-delimited word list into a lowercase set.
+
+    Blank lines and lines starting with '#' are skipped. Returns an empty set
+    when *path* is empty or does not exist, so the dictionary stays optional.
+    """
+    if not path or not os.path.exists(path):
+        return set()
+    words: Set[str] = set()
+    with open(path, "r", encoding="utf-8") as f:
+        for line in f:
+            line = line.strip()
+            if not line or line.startswith("#"):
+                continue
+            words.add(line.lower())
+    return words
+
+
+def changed_files(base_sha: str, head_sha: str) -> List[str]:
+    """Return paths of files that differ between *base_sha* and *head_sha*.
+
+    Deleted files are included here; the caller filters with os.path.exists.
+    """
+    # list only changed files in the PR
+    out = sh("git", "diff", "--name-only", base_sha, head_sha)
+    files = [x.strip() for x in out.splitlines() if x.strip()]
+    return files
+
+
+def is_text_file(path: str) -> bool:
+    """Heuristic: is *path* a prose-like file worth grammar-checking?
+
+    NOTE(review): "readme.md"/"readme.txt" are already matched by the extension
+    set; only an extensionless "readme" gains coverage from the basename check.
+    """
+    ext = os.path.splitext(path)[1].lower()
+    return ext in {".md", ".txt", ".rst", ".adoc", ".asciidoc", ".tex"} or os.path.basename(path).lower() in {
+        "readme", "readme.md", "readme.txt"
+    }
+
+
+def lt_check(api_url: str, language: str, text: str) -> Dict:
+    """POST *text* to a LanguageTool check endpoint and return the parsed JSON.
+
+    *api_url* is presumably a LanguageTool `/v2/check` URL — confirm at the
+    call site. Raises requests.HTTPError on non-2xx responses; the request
+    times out after 60 seconds.
+    """
+    resp = requests.post(
+        api_url,
+        data={
+            "language": language,
+            "text": text,
+        },
+        timeout=60,
+    )
+    resp.raise_for_status()
+    return resp.json()
+
+
+def main() -> int:
+    """Lint PR-changed text files with LanguageTool; print rdjson to stdout.
+
+    Always returns 0: per-file API failures become WARNING diagnostics in the
+    report instead of aborting the run.
+    """
+    ap = argparse.ArgumentParser()
+    ap.add_argument("--api-url", required=True)
+    ap.add_argument("--language", required=True)
+    ap.add_argument("--base-sha", required=True)
+    ap.add_argument("--head-sha", required=True)
+    ap.add_argument("--dictionary", default=".languagetool/words.txt")
+    ap.add_argument("--max-suggestions", type=int, default=3)
+    args = ap.parse_args()
+
+    dict_words = load_dictionary(args.dictionary)
+
+    # Keep only text-like files that still exist at HEAD (skips deletions).
+    files = changed_files(args.base_sha, args.head_sha)
+    files = [f for f in files if os.path.exists(f) and is_text_file(f)]
+
+    diagnostics: List[Dict] = []
+
+    for path in files:
+        try:
+            with open(path, "r", encoding="utf-8") as f:
+                content = f.read()
+        except UnicodeDecodeError:
+            # Lossy fallback so a single binary-ish file cannot abort the run.
+            with open(path, "r", encoding="utf-8", errors="replace") as f:
+                content = f.read()
+
+        if not content.strip():
+            continue
+
+        try:
+            result = lt_check(args.api_url, args.language, content)
+        except Exception as e:
+            # Emit a single diagnostic if the API call fails for a file
+            diagnostics.append(
+                {
+                    "message": f"LanguageTool API error for {path}: {e}",
+                    "location": {"path": path, "range": {"start": {"line": 1, "column": 1}}},
+                    "severity": "WARNING",
+                }
+            )
+            continue
+
+        matches = result.get("matches", [])
+        for m in matches:
+            # offset/length locate the flagged span within the file content.
+            offset = int(m.get("offset", 0))
+            length = int(m.get("length", 0))
+            bad = content[offset : offset + length]
+
+            rule = m.get("rule", {}) or {}
+            rule_id = rule.get("id") or "UNKNOWN_RULE"
+            category = (rule.get("category", {}) or {}).get("id", "")
+
+            # Cheap custom dictionary support without modifying LT server:
+            # if LT reports a spelling/typo-ish issue AND the token is in our dictionary -> ignore it.
+            # (Most spelling problems show up in category TYPOS and/or rule ids containing MORFOLOGIK.)
+            bad_norm = normalize_word(bad)
+            if dict_words and bad_norm:
+                looks_like_spelling = (category.upper() == "TYPOS") or ("MORFOLOGIK" in str(rule_id).upper())
+                if looks_like_spelling and (bad_norm in dict_words):
+                    continue
+
+            start_line, start_col = offset_to_line_col(content, offset)
+            end_line, end_col = offset_to_line_col(content, offset + max(length, 0))
+
+            # Suggestions (as rdjson "suggestions" with ranges)
+            suggestions = []
+            repls = m.get("replacements", []) or []
+            for r in repls[: args.max_suggestions]:
+                val = r.get("value")
+                if not val:
+                    continue
+                suggestions.append(
+                    {
+                        "range": {
+                            "start": {"line": start_line, "column": start_col},
+                            "end": {"line": end_line, "column": end_col},
+                        },
+                        "text": val,
+                    }
+                )
+
+            code = {"value": rule_id}
+            urls = rule.get("urls") or []
+            if urls and isinstance(urls, list):
+                # NOTE(review): assumes urls[0] is a dict like {"value": ...};
+                # confirm against the LT response schema — a non-dict element
+                # would raise AttributeError on this unguarded .get.
+                u = urls[0].get("value")
+                if u:
+                    code["url"] = u
+
+            diagnostics.append(
+                {
+                    "message": m.get("message") or "LanguageTool finding",
+                    "location": {
+                        "path": path,
+                        "range": {
+                            "start": {"line": start_line, "column": start_col},
+                            "end": {"line": end_line, "column": end_col},
+                        },
+                    },
+                    "severity": "WARNING",
+                    "code": code,
+                    **({"suggestions": suggestions} if suggestions else {}),
+                }
+            )
+
+    # Envelope expected by reviewdog's rdjson input format.
+    rdjson = {
+        "source": {
+            "name": "LanguageTool",
+            "url": "https://languagetool.org",
+        },
+        "diagnostics": diagnostics,
+    }
+
+    print(json.dumps(rdjson))
+    return 0
+
+
+# Script entry point: the process exit status comes from main()'s return value.
+if __name__ == "__main__":
+    raise SystemExit(main())
diff --git a/.github/workflows/checkspelling.yml b/.github/workflows/checkspelling.yml
new file mode 100644
index 00000000..d8ec2885
--- /dev/null
+++ b/.github/workflows/checkspelling.yml
@@ -0,0 +1,154 @@
+name: Harper (grammar + spelling)
+
+on:
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+
+permissions:
+ contents: read
+ pull-requests: write
+
+jobs:
+ harper:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Set up Rust
+ uses: dtolnay/rust-toolchain@stable
+
+ - name: Cache Cargo
+ uses: actions/cache@v4
+ with:
+ path: |
+ ~/.cargo/registry
+ ~/.cargo/git
+ target
+ key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-cargo-
+
+ - name: Install harper-cli
+ run: |
+ cargo install --locked --git https://github.com/Automattic/harper.git harper-cli
+
+ - name: Write Harper dictionary
+ env:
+ XDG_CONFIG_HOME: ${{ github.workspace }}/.xdg
+ run: |
+ mkdir -p "$XDG_CONFIG_HOME/harper-ls"
+ cat > "$XDG_CONFIG_HOME/harper-ls/dictionary.txt" <<'EOF'
+ AerynOS
+ astrojs
+ sha256sum
+ SHA256
+ certutil
+ hashfile
+ lastUpdated
+ EOF
+
+ - name: Run Harper on PR-changed files
+ id: harper
+ env:
+ XDG_CONFIG_HOME: ${{ github.workspace }}/.xdg
+ run: |
+ set -euo pipefail
+
+ BASE="${{ github.event.pull_request.base.sha }}"
+ HEAD="${{ github.event.pull_request.head.sha }}"
+
+ mapfile -t FILES < <(git diff --name-only "$BASE" "$HEAD" -- \
+ '*.md' '*.mdx' '*.txt' || true)
+
+ : > harper-report.txt
+
+ if [ "${#FILES[@]}" -eq 0 ]; then
+ echo "No matching files changed (.md/.mdx/.txt)." > harper-report.txt
+ echo "fail=0" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ fail=0
+ for f in "${FILES[@]}"; do
+ if [ ! -f "$f" ]; then
+ continue
+ fi
+
+ echo "===== $f =====" >> harper-report.txt
+ echo >> harper-report.txt
+
+ out="$(harper-cli lint "$f" || true)"
+ echo "$out" >> harper-report.txt
+ echo >> harper-report.txt
+
+ after="$(printf '%s\n' "$out" | sed -n 's/.*after overlap removal, \([0-9]\+\) after.*/\1/p' | tail -n 1)"
+ after="${after:-0}"
+
+ if [ "$after" -ne 0 ]; then
+ fail=1
+ fi
+ done
+
+ echo "fail=$fail" >> "$GITHUB_OUTPUT"
+
+      # Post (or update) a single sticky PR comment containing the Harper report.
+      - name: Comment on PR with Harper output
+        if: github.event.pull_request
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const fs = require('fs');
+
+            const raw = fs.readFileSync('harper-report.txt', 'utf8');
+
+            // GitHub comment bodies cap at 65536 characters; keep some headroom.
+            const limit = 65000;
+            const clipped =
+              raw.length > limit
+                ? raw.slice(0, limit) + "\n\n[truncated]\n"
+                : raw;
+
+            // Hidden marker that identifies our comment on re-runs. It MUST be
+            // non-empty: `body.includes('')` is true for every comment, so an
+            // empty marker would make the bot overwrite the first comment on
+            // the PR, whoever wrote it.
+            const marker = '<!-- harper-report -->';
+            const body =
+              `${marker}\n` +
+              `\n` +
+              `<details><summary>Harper output</summary>\n\n` +
+              "```text\n" +
+              clipped +
+              "\n```\n" +
+              `</details>\n`;
+
+            const { owner, repo } = context.repo;
+            const issue_number = context.issue.number;
+
+            // per_page raised from the default 30 so the marker search still
+            // finds our comment on busy PRs.
+            const comments = await github.rest.issues.listComments({
+              owner,
+              repo,
+              issue_number,
+              per_page: 100,
+            });
+
+            const existing = comments.data.find(c =>
+              c.body && c.body.includes(marker)
+            );
+
+            if (existing) {
+              await github.rest.issues.updateComment({
+                owner,
+                repo,
+                comment_id: existing.id,
+                body,
+              });
+            } else {
+              await github.rest.issues.createComment({
+                owner,
+                repo,
+                issue_number,
+                body,
+              });
+            }
+
+ - name: Fail if Harper found issues
+ if: steps.harper.outputs.fail == '1'
+ run: exit 1
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
deleted file mode 100644
index f9fa42a3..00000000
--- a/.github/workflows/deploy.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: Deploy to GitHub Pages
-
-on:
- # Trigger the workflow every time you push to the `main` branch
- # Using a different branch name? Replace `main` with your branch’s name
- push:
- branches: [ main ]
- # Allows you to run this workflow manually from the Actions tab on GitHub.
- workflow_dispatch:
-
-# Allow this job to clone the repo and create a page deployment
-permissions:
- contents: read
- pages: write
- id-token: write
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout your repository using git
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
- - name: Install, build, and upload your site
- uses: withastro/action@v3
- with:
- path: . # The root location of your Astro project inside the repository. (optional)
- node-version: 20 # The specific version of Node that should be used to build your site. Defaults to 20. (optional)
- package-manager: npm # The Node package manager that should be used to install dependencies and build your site. Automatically detected based on your lockfile. (optional)
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- deploy:
- needs: build
- runs-on: ubuntu-latest
- environment:
- name: github-pages
- url: ${{ steps.deployment.outputs.page_url }}
- steps:
- - name: Deploy to GitHub Pages
- id: deployment
- uses: actions/deploy-pages@v4
diff --git a/src/content/docs/Users/System Management/index.mdx b/src/content/docs/Users/System Management/index.mdx
index 7b74c1a3..f78ab147 100644
--- a/src/content/docs/Users/System Management/index.mdx
+++ b/src/content/docs/Users/System Management/index.mdx
@@ -8,3 +8,5 @@ import DirectoryList from '@components/DirectoryList.astro';
Use this section to manage an installed system, from understanding where configuration lives to operating moss states safely.
+
+ There may be multiple versions available with different desktop environments, denoted by name.