Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions .github/workflows/frontend-build-and-packaging.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
name: frontend-build-and-packaging

# Build and package the frontend on every push and on PR open/update.
on:
  push:
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  build-and-packaging:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout frontend sources
        uses: actions/checkout@v4

      # v4 matches checkout@v4 above (v3 runs on a deprecated Node runtime);
      # the built-in npm cache avoids re-downloading packages on every run.
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: npm
          cache-dependency-path: frontend/package-lock.json

      - name: Display Node version
        run: node -v && npm -v

      - name: Install frontend dependencies
        working-directory: frontend
        run: npm ci

      - name: Run build and package
        working-directory: frontend
        run: npm run package
86 changes: 86 additions & 0 deletions .github/workflows/frontend-e2e-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
name: frontend-e2e-tests

# End-to-end tests: headless (Xvfb) Electron/browser run against the
# backend, with Zenodo test datasets cached between runs.
on:
  push:
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  e2e:
    runs-on: ubuntu-22.04

    steps:
      - name: Checkout sources
        uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.13'

      - name: Display Python version
        run: python -c "import sys; print(sys.version)"

      # v4 matches checkout@v4/setup-python@v5 (v3 runs on a deprecated
      # Node runtime); the npm cache speeds up `npm ci` below.
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: npm
          cache-dependency-path: frontend/package-lock.json

      - name: Install dependencies
        working-directory: frontend
        run: npm ci

      - name: Install Xvfb
        run: sudo apt-get update && sudo apt-get install -y xvfb

      - name: Define dataset list and cache key
        id: datasets
        run: |
          urls=($(cat zenodo_datasets.txt))
          # Quote "$GITHUB_OUTPUT" — the path could contain spaces (SC2086).
          echo "files=${urls[*]}" >> "$GITHUB_OUTPUT"

          # Generate a hash key from the URLs list, this is used for caching
          key=$(echo "${urls[*]}" | sha256sum | cut -d ' ' -f1)
          echo "key=${key}" >> "$GITHUB_OUTPUT"

      - name: Cache Zenodo datasets
        id: cache-datasets
        uses: actions/cache/restore@v4
        with:
          path: e2e_datasets
          key: ${{ steps.datasets.outputs.key }}

      - if: ${{ steps.cache-datasets.outputs.cache-hit != 'true' }}
        name: Download datasets if not cached
        run: |
          mkdir -p e2e_datasets
          for url in ${{ steps.datasets.outputs.files }}; do
            echo "Downloading $(basename "$url") ..."
            wget -P e2e_datasets/ "$url"
          done

      # Save explicitly (restore/save split) so the cache is written even
      # when a later step fails — actions/cache's implicit save would not run.
      - name: Always save Zenodo datasets (even if pytest would fail)
        id: cache-datasets-save
        if: always() && steps.cache-datasets.outputs.cache-hit != 'true'
        uses: actions/cache/save@v4
        with:
          key: ${{ steps.cache-datasets.outputs.cache-primary-key }}
          path: e2e_datasets

      - name: Run E2E tests
        working-directory: frontend
        run: |
          xvfb-run --auto-servernum --server-args="-screen 0 1920x1080x24" bash -c "
          python3 -m venv ../ibex_venv
          source ../ibex_venv/bin/activate
          pip install --upgrade pip setuptools wheel
          cd ../backend
          pip install -e .
          cd ../frontend
          npm run start:e2e &
          npx wait-on tcp:9222
          npx wait-on http://127.0.0.1:8000/docs/
          sleep 5
          npm run test:e2e
          "
33 changes: 33 additions & 0 deletions .github/workflows/frontend-linting.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
name: frontend-linting-and-formatting

# Lint and check formatting of the frontend on every push and PR update.
on:
  push:
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  lint:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout frontend sources
        uses: actions/checkout@v4

      # v4 matches checkout@v4 above (v3 runs on a deprecated Node runtime);
      # the built-in npm cache avoids re-downloading packages on every run.
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: npm
          cache-dependency-path: frontend/package-lock.json

      - name: Display Node version
        run: node -v && npm -v

      - name: Install frontend dependencies
        working-directory: frontend
        run: npm ci

      - name: Run code formatting
        working-directory: frontend
        run: npm run format

      - name: Run linting
        working-directory: frontend
        run: npm run lint
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -105,4 +105,7 @@ saxon*.jar
# ASV folder
/.asv

.idea/
.idea/

# Local folder for E2E tests
e2e_datasets
Loading
Loading