Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
f27511a
Add GitHub Action for LanguageTool on PRs
CookieSource Jan 13, 2026
650be58
Add support for .mdx files in language tool workflow
CookieSource Jan 13, 2026
dc77791
Delete .github/workflows/deploy.yml
CookieSource Jan 13, 2026
c00b21e
Refactor LanguageTool workflow for clarity and efficiency
CookieSource Jan 13, 2026
072c78e
Update languagetool-pr.yml
CookieSource Jan 13, 2026
2c9d531
Update languagetool-pr.yml
CookieSource Jan 13, 2026
3114311
Update LanguageTool workflow for PR comments
CookieSource Jan 13, 2026
1ff1ed4
Refactor LanguageTool workflow for PR review
CookieSource Jan 13, 2026
fd877f3
Update languagetool-pr.yml
CookieSource Jan 13, 2026
6cfe2b6
Add languagetool_reviewdog.py script
CookieSource Jan 13, 2026
ebfebd1
Update languagetool-pr.yml
CookieSource Jan 13, 2026
d3d797f
Modify LanguageTool workflow for PR comments and Java setup
CookieSource Jan 13, 2026
c455ba3
Update languagetool.yml
CookieSource Jan 13, 2026
b51c3d2
Update languagetool.yml
CookieSource Jan 13, 2026
d6ef83d
PR after a working version
CookieSource Jan 13, 2026
1f7a335
Configure Dependabot for npm updates
CookieSource Jan 15, 2026
d24353f
Change Dependabot update interval to monthly
CookieSource Jan 15, 2026
259bf69
Refactor LanguageTool workflow for reviewdog integration
CookieSource Jan 15, 2026
a5900aa
Update languagetool.yml
CookieSource Jan 15, 2026
9d26753
Merge branch 'AerynOS:main' into main
CookieSource Jan 18, 2026
c22aacd
Update GitHub Actions workflow for grammar checks harper
CookieSource Jan 19, 2026
c341557
Update moss-state-management.mdx
CookieSource Jan 19, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
  # Only the Astro toolchain is tracked; all other npm dependencies are
  # intentionally excluded via the allow-list below.
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "monthly"
    allow:
      - dependency-name: "astro"
      - dependency-name: "@astrojs/starlight"
185 changes: 185 additions & 0 deletions .github/scripts/languagetool_reviewdog.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,185 @@
#!/usr/bin/env python3
import argparse
import json
import os
import re
import subprocess
from typing import Dict, List, Optional, Set, Tuple

import requests


def sh(*args: str) -> str:
    """Run an external command and return its stdout with surrounding whitespace removed.

    Raises subprocess.CalledProcessError if the command exits non-zero.
    """
    output = subprocess.check_output(list(args), text=True)
    return output.strip()


def offset_to_line_col(text: str, offset: int) -> Tuple[int, int]:
    """Translate a 0-based character *offset* into a 1-based (line, column) pair.

    reviewdog's rdjson format expects 1-based line/column numbers.
    """
    prefix = text[:offset]
    line_no = prefix.count("\n") + 1
    last_break = prefix.rfind("\n")
    # Column is the distance past the last newline; -1 from rfind cancels out.
    return line_no, offset - last_break


def normalize_word(s: str) -> str:
    """Lowercase *s* after stripping leading and trailing non-word characters.

    Underscores count as punctuation here; interior punctuation (e.g. the
    apostrophe in "it's") is preserved.
    """
    stripped = re.sub(r"^[\W_]+", "", s, flags=re.UNICODE)
    stripped = re.sub(r"[\W_]+$", "", stripped, flags=re.UNICODE)
    return stripped.lower()


def load_dictionary(path: str) -> Set[str]:
    """Load a one-word-per-line allow-list file into a lowercase set.

    Blank lines and lines starting with '#' are skipped. A missing or empty
    path yields an empty set rather than an error.
    """
    if not path or not os.path.exists(path):
        return set()
    entries: Set[str] = set()
    with open(path, "r", encoding="utf-8") as fh:
        for raw in fh:
            word = raw.strip()
            if not word or word.startswith("#"):
                continue
            entries.add(word.lower())
    return entries


def changed_files(base_sha: str, head_sha: str) -> List[str]:
    """Return the paths of files that differ between the two commits (PR diff)."""
    diff_output = sh("git", "diff", "--name-only", base_sha, head_sha)
    return [name for name in (line.strip() for line in diff_output.splitlines()) if name]


def is_text_file(path: str) -> bool:
    """Return True when *path* looks like a prose file worth grammar-checking.

    Matches either by extension (Markdown/MDX, plain text, reST, AsciiDoc,
    TeX) or by a well-known README basename without a recognised extension.
    """
    # ".mdx" added: the docs in this repo are .mdx and the workflow diffs
    # '*.mdx', but the original set silently dropped those files.
    text_exts = {".md", ".mdx", ".txt", ".rst", ".adoc", ".asciidoc", ".tex"}
    readme_names = {"readme", "readme.md", "readme.txt"}
    if os.path.splitext(path)[1].lower() in text_exts:
        return True
    return os.path.basename(path).lower() in readme_names


def lt_check(api_url: str, language: str, text: str) -> Dict:
    """POST *text* to a LanguageTool HTTP check endpoint and return the parsed JSON.

    Raises requests.HTTPError on a non-2xx response; the 60s timeout guards
    against a hung server.
    """
    payload = {"language": language, "text": text}
    response = requests.post(api_url, data=payload, timeout=60)
    response.raise_for_status()
    return response.json()


def main() -> int:
    """Check PR-changed prose files with LanguageTool and print reviewdog rdjson.

    Diffs --base-sha..--head-sha, sends each changed text file to the
    LanguageTool HTTP API, drops spelling hits present in the local
    allow-list dictionary, and prints one rdjson document to stdout.
    Always returns 0; per-file API failures become WARNING diagnostics.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--api-url", required=True)   # LanguageTool check endpoint URL
    ap.add_argument("--language", required=True)  # language code passed to the API
    ap.add_argument("--base-sha", required=True)
    ap.add_argument("--head-sha", required=True)
    ap.add_argument("--dictionary", default=".languagetool/words.txt")
    ap.add_argument("--max-suggestions", type=int, default=3)
    args = ap.parse_args()

    dict_words = load_dictionary(args.dictionary)

    # Only consider files the PR touched that still exist and look like prose.
    files = changed_files(args.base_sha, args.head_sha)
    files = [f for f in files if os.path.exists(f) and is_text_file(f)]

    diagnostics: List[Dict] = []

    for path in files:
        try:
            with open(path, "r", encoding="utf-8") as f:
                content = f.read()
        except UnicodeDecodeError:
            # Lossy fallback: better to check mangled text than skip the file.
            with open(path, "r", encoding="utf-8", errors="replace") as f:
                content = f.read()

        if not content.strip():
            continue

        try:
            result = lt_check(args.api_url, args.language, content)
        except Exception as e:
            # Emit a single diagnostic if the API call fails for a file
            diagnostics.append(
                {
                    "message": f"LanguageTool API error for {path}: {e}",
                    "location": {"path": path, "range": {"start": {"line": 1, "column": 1}}},
                    "severity": "WARNING",
                }
            )
            continue

        matches = result.get("matches", [])
        for m in matches:
            # LT reports each finding as a 0-based (offset, length) span.
            offset = int(m.get("offset", 0))
            length = int(m.get("length", 0))
            bad = content[offset : offset + length]

            rule = m.get("rule", {}) or {}
            rule_id = rule.get("id") or "UNKNOWN_RULE"
            category = (rule.get("category", {}) or {}).get("id", "")

            # Cheap custom dictionary support without modifying LT server:
            # if LT reports a spelling/typo-ish issue AND the token is in our dictionary -> ignore it.
            # (Most spelling problems show up in category TYPOS and/or rule ids containing MORFOLOGIK.)
            bad_norm = normalize_word(bad)
            if dict_words and bad_norm:
                looks_like_spelling = (category.upper() == "TYPOS") or ("MORFOLOGIK" in str(rule_id).upper())
                if looks_like_spelling and (bad_norm in dict_words):
                    continue

            # Convert LT's 0-based offsets to reviewdog's 1-based line/column.
            start_line, start_col = offset_to_line_col(content, offset)
            end_line, end_col = offset_to_line_col(content, offset + max(length, 0))

            # Suggestions (as rdjson "suggestions" with ranges)
            suggestions = []
            repls = m.get("replacements", []) or []
            for r in repls[: args.max_suggestions]:
                val = r.get("value")
                if not val:
                    continue
                suggestions.append(
                    {
                        "range": {
                            "start": {"line": start_line, "column": start_col},
                            "end": {"line": end_line, "column": end_col},
                        },
                        "text": val,
                    }
                )

            # Attach the rule id (and its first reference URL, when present).
            code = {"value": rule_id}
            urls = rule.get("urls") or []
            if urls and isinstance(urls, list):
                u = urls[0].get("value")
                if u:
                    code["url"] = u

            diagnostics.append(
                {
                    "message": m.get("message") or "LanguageTool finding",
                    "location": {
                        "path": path,
                        "range": {
                            "start": {"line": start_line, "column": start_col},
                            "end": {"line": end_line, "column": end_col},
                        },
                    },
                    "severity": "WARNING",
                    "code": code,
                    # Only include the key when there is at least one suggestion.
                    **({"suggestions": suggestions} if suggestions else {}),
                }
            )

    rdjson = {
        "source": {
            "name": "LanguageTool",
            "url": "https://languagetool.org",
        },
        "diagnostics": diagnostics,
    }

    # Single-line JSON on stdout is what reviewdog -f=rdjson consumes.
    print(json.dumps(rdjson))
    return 0


if __name__ == "__main__":
    # SystemExit propagates main()'s return value as the process exit status.
    raise SystemExit(main())
43 changes: 0 additions & 43 deletions .github/workflows/deploy.yml

This file was deleted.

84 changes: 84 additions & 0 deletions .github/workflows/languagetool.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
# Runs Harper over Markdown/MDX files changed by a PR and posts (or updates)
# a single sticky comment with the suggestions.
name: Harper (grammar suggestions)

on:
  pull_request:

permissions:
  contents: read
  pull-requests: write

jobs:
  harper:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          # Full history so origin/<base_ref>...<sha> below resolves both ends.
          fetch-depth: 0

      - name: Install Harper
        run: |
          curl -fsSL https://raw.githubusercontent.com/automattic/harper/main/install.sh | sh
          echo "$HOME/.harper/bin" >> "$GITHUB_PATH"

      - name: Run Harper on changed md/mdx
        id: harper
        run: |
          set -euo pipefail

          mapfile -t FILES < <(
            git diff --name-only origin/${{ github.base_ref }}...${{ github.sha }} \
              -- '*.md' '*.mdx' || true
          )

          if [ ${#FILES[@]} -eq 0 ]; then
            echo "No markdown files changed." > harper.txt
            exit 0
          fi

          {
            echo "## Harper grammar suggestions"
            echo
            for f in "${FILES[@]}"; do
              # Skip files deleted by the PR.
              [ -f "$f" ] || continue
              echo "### $f"
              harper "$f" || true
              echo
            done
          } > harper.txt

      - name: Post PR comment
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require("fs");
            const body = fs.readFileSync("harper.txt", "utf8").trim();

            // Hidden marker lets us find and update our own comment on re-runs.
            const marker = "<!-- harper-report -->";
            const commentBody = `${marker}\n${body || "No issues found."}`;

            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });

            const existing = comments.find(c =>
              c.body && c.body.includes(marker)
            );

            if (existing) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existing.id,
                body: commentBody,
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body: commentBody,
              });
            }
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ Use the commands below to inspect and manage those states, discover software, an
```bash
moss state active
```

There may be multiple versions available with different desktop environments, denoted by `AerynOS-<version>-<desktop>-<architecture>.iso`, where `<desktop>` is the desktop environment.
2. Review the state history when you need context for a rollback.

```bash
Expand Down