diff --git a/.github/workflows/deploy-pages.yml b/.github/workflows/deploy-pages.yml index 0eeeb6b..10335a3 100644 --- a/.github/workflows/deploy-pages.yml +++ b/.github/workflows/deploy-pages.yml @@ -41,9 +41,14 @@ jobs: BASE_URL: /fair-data-access run: myst build --html - - name: Copy static files (DID document, keys) + - name: Copy static files (DID documents, keys) run: | cp docs/did.json docs/_build/html/did.json + # Copy example consumer DID for walkthrough + if [ -d docs/example-consumer ]; then + mkdir -p docs/_build/html/example-consumer + cp docs/example-consumer/did.json docs/_build/html/example-consumer/did.json + fi # Copy any existing wrapped keys if [ -d docs/keys ]; then cp -r docs/keys docs/_build/html/keys diff --git a/.github/workflows/walkthrough.yml b/.github/workflows/walkthrough.yml new file mode 100644 index 0000000..c0dfab1 --- /dev/null +++ b/.github/workflows/walkthrough.yml @@ -0,0 +1,63 @@ +name: Test and build walkthrough + +on: + push: + branches: [main, feature/public-demo] + paths: + - 'examples/walkthrough/**' + - 'fair_data_access/**' + pull_request: + paths: + - 'examples/walkthrough/**' + - 'fair_data_access/**' + + # Allow manual trigger + workflow_dispatch: + +jobs: + test-and-build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install dependencies + run: | + pip install -e . + pip install jupytext jupyter-book pandas + + - name: Write private keys from secrets + working-directory: examples/walkthrough + env: + EXAMPLE_CONSUMER_PRIVATE_KEY: ${{ secrets.EXAMPLE_CONSUMER_PRIVATE_KEY }} + EXAMPLE_PROVIDER_PRIVATE_KEY: ${{ secrets.EXAMPLE_PROVIDER_PRIVATE_KEY }} + run: | + printenv EXAMPLE_CONSUMER_PRIVATE_KEY > keys/example-consumer-private.pem + printenv EXAMPLE_PROVIDER_PRIVATE_KEY > keys/example-provider-private.pem + chmod 600 keys/*-private.pem + + - name: Convert .py to .ipynb and execute (smoke test) + working-directory: examples/walkthrough + run: | + for nb in 00_setup_did 01_provider 02_consumer; do + echo "=== Converting and executing ${nb}.py ===" + jupytext --to notebook "${nb}.py" + jupyter nbconvert --execute --to notebook \ + --output-dir=/tmp/executed \ + "${nb}.ipynb" + done + + - name: Build Jupyter-book + working-directory: examples/walkthrough + run: jupyter-book build . + + - name: Upload book artifact + uses: actions/upload-artifact@v4 + with: + name: walkthrough-book + path: examples/walkthrough/_build/html + retention-days: 14 diff --git a/docs/example-consumer/did.json b/docs/example-consumer/did.json new file mode 100644 index 0000000..2f49e75 --- /dev/null +++ b/docs/example-consumer/did.json @@ -0,0 +1,26 @@ +{ + "@context": [ + "https://www.w3.org/ns/did/v1", + "https://w3id.org/security/suites/jws-2020/v1" + ], + "id": "did:web:fair2adapt.github.io:fair-data-access:example-consumer", + "verificationMethod": [ + { + "id": "did:web:fair2adapt.github.io:fair-data-access:example-consumer#key-1", + "type": "JsonWebKey2020", + "controller": "did:web:fair2adapt.github.io:fair-data-access:example-consumer", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256", + "x": "2OyUBGxbSw4NaV_-XT5qTQwZSIAdA4hFYxiUIiWOTXw", + "y": "hsn7gogSYPnx_6zWXxzMYE9nhSvJJDqdkfWHBTUbmrM" + } + } + ], + "authentication": [ + "did:web:fair2adapt.github.io:fair-data-access:example-consumer#key-1" + ], + "assertionMethod": [ + "did:web:fair2adapt.github.io:fair-data-access:example-consumer#key-1" + ] +} \ No newline at end of file diff --git a/examples/walkthrough/.gitignore b/examples/walkthrough/.gitignore new file mode 100644 index 0000000..9873ba6 --- /dev/null +++ b/examples/walkthrough/.gitignore @@ -0,0 +1,10 @@ +# Generated by notebooks — not committed +*.ipynb +*.enc +_build/ + +# Private keys (also gitignored in keys/.gitignore) +keys/*_private.pem + +# Wrapped keys (generated by 01_provider, consumed by 02_consumer) +keys/wrapped-*.json diff --git a/examples/walkthrough/00_setup_did.py b/examples/walkthrough/00_setup_did.py new file mode 100644 index 0000000..ab45806 --- /dev/null +++ b/examples/walkthrough/00_setup_did.py @@ -0,0 +1,221 @@ +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: percent +# format_version: '1.3' +# jupytext_version: 1.16.0 +# kernelspec: +# display_name: Python 3 +# language: python +# name: python3 +# --- + +# %% [markdown] +# # Chapter 0 — Set up your decentralised identity (DID) +# +# Before you can request access to a protected dataset, you need a +# **decentralised identifier** — a cryptographic identity that stays with you +# across institutions. +# +# ```{admonition} One-time setup +# :class: tip +# You do this **once**. The identity you create here works for every dataset +# request you ever make through the FAIR2Adapt framework. +# ``` +# +# ## What is a DID? +# +# A DID is a URL-like string that points to a small JSON document containing +# your **public key**. Anyone who knows your DID can look up your public key +# and encrypt things specifically for you. +# +# Example: `did:web:yourname.github.io:my-did:researcher` +# +# This resolves to: `https://yourname.github.io/my-did/researcher/did.json` +# +# The JSON file at that URL is your **DID document** — it contains your +# public key but **never** your private key. +# +# ## Why not just use ORCID? +# +# ORCID is great for **attribution** (who published what). But ORCID doesn't +# give you a **cryptographic key** — you can't encrypt a dataset key for an +# ORCID. DIDs add the key management layer that ORCID doesn't provide. +# +# In practice, a researcher has both: ORCID for citations, DID for data access. + +# %% [markdown] +# ## Step 1 — Generate your keypair +# +# This creates an EC P-256 keypair. The private key stays on your machine. +# The public key goes into your DID document. +# +# ```{warning} +# Your **private key is your identity**. Anyone with it can impersonate you +# and decrypt data meant for you. Treat it like a password. If you lose it, +# generate a new keypair and update your DID document. +# ``` + +# %% +from pathlib import Path +from fair_data_access.keys import generate_did_keypair + +KEYS_DIR = Path("keys") + +# For this walkthrough we use the pre-generated example consumer identity. +# To create your own, uncomment the block below. + +CONSUMER_PRIVATE_KEY = KEYS_DIR / "example-consumer-private.pem" +CONSUMER_PUBLIC_KEY = KEYS_DIR / "example-consumer-public.pem" + +if CONSUMER_PRIVATE_KEY.exists(): + print(f"Using existing keypair at {CONSUMER_PRIVATE_KEY}") + private_pem = CONSUMER_PRIVATE_KEY.read_bytes() + public_pem = CONSUMER_PUBLIC_KEY.read_bytes() +else: + # In the real workflow, you generate your keypair ONCE and store the + # private key securely (e.g. as a GitHub Secret, in a password manager, + # or on an encrypted drive). You never regenerate it — losing it means + # losing your identity and access to any data wrapped for you. + # + # For this walkthrough, the private key is stored as a GitHub Secret + # (EXAMPLE_CONSUMER_PRIVATE_KEY) and written to disk by CI before the + # notebooks run. If you're running locally for the first time, generate + # your own keypair by uncommenting the lines below: + # + # private_pem, public_pem = generate_did_keypair() + # CONSUMER_PRIVATE_KEY.write_bytes(private_pem) + # CONSUMER_PUBLIC_KEY.write_bytes(public_pem) + # + raise FileNotFoundError( + f"Consumer private key not found at {CONSUMER_PRIVATE_KEY}.\n" + "If running locally for the first time, generate a keypair:\n" + " from fair_data_access.keys import generate_did_keypair\n" + " private, public = generate_did_keypair()\n" + " Path('keys/example-consumer-private.pem').write_bytes(private)\n" + "Or set the EXAMPLE_CONSUMER_PRIVATE_KEY environment variable." + ) + +# %% +# Show the public key (this is safe to share — it's public by design) +print("=== Public key (safe to share) ===") +print(public_pem.decode()) + +# %% [markdown] +# ## Step 2 — Create your DID document +# +# The DID document is a small JSON-LD file that maps your DID string to your +# public key. It follows the [W3C DID Core](https://www.w3.org/TR/did-core/) +# specification. + +# %% +import json +from fair_data_access.did import create_did_document + +# For the walkthrough, we use a DID under the fair2adapt GitHub Pages domain. +# When you create your own, replace this with your domain. +CONSUMER_DID = "did:web:fair2adapt.github.io:fair-data-access:example-consumer" + +did_document = create_did_document(CONSUMER_DID, public_pem) + +print("=== DID Document ===") +print(json.dumps(did_document, indent=2)) + +# %% [markdown] +# The document above is what the world sees when they resolve your DID. It +# contains: +# +# - **`id`** — your DID string (the identifier) +# - **`verificationMethod`** — your public key in JWK format +# - **`authentication`** / **`assertionMethod`** — which key to use for +# authentication and signing +# +# Notice: **no private key anywhere in the document.** + +# %% [markdown] +# ## Step 3 — Publish the DID document +# +# "Publishing" means putting the JSON file at the URL your DID resolves to. +# For `did:web:yourname.github.io:my-did:researcher`, the URL is: +# +# ``` +# https://yourname.github.io/my-did/researcher/did.json +# ``` +# +# ### Option A: GitHub Pages (recommended, free, 5 minutes) +# +# ```bash +# # One-time setup +# gh repo create my-did --public --description "My DID document" +# cd my-did +# mkdir -p researcher +# # Copy the DID document generated above +# cp /path/to/did.json researcher/did.json +# git add . && git commit -m "Publish DID document" +# git push +# # Enable GitHub Pages on the repo (main branch, root directory) +# gh repo edit --enable-pages --pages-branch main --pages-path / +# ``` +# +# After a minute, your DID is live at +# `https://yourname.github.io/my-did/researcher/did.json` +# +# ### Option B: Institutional web server +# +# Ask your IT department to serve `did.json` at an institutional URL, e.g. +# `https://university.edu/researchers/alice/did.json`. Your DID becomes +# `did:web:university.edu:researchers:alice`. +# +# ### Option C: Personal website +# +# If you have `https://alice.example.com`, put `did.json` at the root. Your +# DID is `did:web:alice.example.com`. +# +# --- +# +# For this walkthrough, the example DID documents are already committed in +# `keys/did/` and would be served via GitHub Pages at: +# - `https://fair2adapt.github.io/fair-data-access/example-provider/did.json` +# - `https://fair2adapt.github.io/fair-data-access/example-consumer/did.json` + +# %% [markdown] +# ## Step 4 — Verify your DID resolves +# +# Once published, anyone can resolve your DID and retrieve your public key. +# The `fair_data_access` library does this automatically when wrapping keys. + +# %% +# For the walkthrough, we load the DID document from a local file +# instead of resolving it over the network (since the GitHub Pages +# hosting may not be set up yet for this walkthrough). + +did_doc_path = KEYS_DIR / "did" / "example-consumer.json" +resolved_doc = json.loads(did_doc_path.read_text()) + +print("=== Resolved DID Document ===") +print(f"DID: {resolved_doc['id']}") +print(f"Key type: {resolved_doc['verificationMethod'][0]['type']}") +print(f"Curve: {resolved_doc['verificationMethod'][0]['publicKeyJwk']['crv']}") +print() +print("The provider will use the public key above to wrap the dataset") +print("key specifically for you. Only your private key can unwrap it.") + +# %% [markdown] +# ## Summary +# +# You now have: +# +# | What | Where | Who can see it | +# |------|-------|----------------| +# | **Private key** | `keys/example-consumer-private.pem` (your machine, gitignored) | Only you | +# | **Public key** | `keys/example-consumer-public.pem` (committed) | Everyone | +# | **DID document** | `keys/did/example-consumer.json` (published via web) | Everyone | +# | **DID string** | `did:web:fair2adapt.github.io:fair-data-access:example-consumer` | Everyone | +# +# This identity is permanent and portable. You can use it to request access +# to **any dataset** protected by the FAIR2Adapt framework — or any other +# system that supports `did:web` resolution. +# +# **Next:** [Chapter 1 — Provider: encrypt and publish a policy](01_provider.ipynb) diff --git a/examples/walkthrough/01_provider.py b/examples/walkthrough/01_provider.py new file mode 100644 index 0000000..ad6b0b9 --- /dev/null +++ b/examples/walkthrough/01_provider.py @@ -0,0 +1,248 @@ +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: percent +# format_version: '1.3' +# jupytext_version: 1.16.0 +# kernelspec: +# display_name: Python 3 +# language: python +# name: python3 +# --- + +# %% [markdown] +# # Chapter 1 — Provider: encrypt data and publish an ODRL policy +# +# This notebook walks through the **provider side** of the FAIR2Adapt access +# control workflow. You will: +# +# 1. **Encrypt** a dataset with AES-256-GCM +# 2. **Create** an ODRL access policy (JSON-LD) +# 3. **Wrap** the dataset key for a specific consumer's DID +# +# ```{admonition} Production-ready framework +# :class: important +# Every function called below is the same code used to protect the Hamburg +# urban pluvial flood risk dataset. Only the data is synthetic. +# ``` + +# %% [markdown] +# ## Step 1 — Load and inspect the dataset +# +# The dataset is a small CSV of synthetic Mediterranean biodiversity +# observations. In a real scenario, this would be your sensitive research +# data. + +# %% +import pandas as pd +from pathlib import Path + +DATA_DIR = Path("data") +DATASET_PATH = DATA_DIR / "synthetic-biodiversity-observations.csv" + +df = pd.read_csv(DATASET_PATH, comment="#") +print(f"Dataset: {DATASET_PATH}") +print(f"Rows: {len(df)}, Columns: {list(df.columns)}") +df.head() + +# %% [markdown] +# ## Step 2 — Encrypt the dataset +# +# The provider encrypts the dataset with a random AES-256-GCM key. The +# encrypted file can be hosted anywhere (S3, Zenodo, GitHub) — without the +# key, it's unreadable. +# +# ```{note} +# AES-256-GCM provides both **confidentiality** (the data is unreadable) and +# **integrity** (any tampering is detected on decryption). The nonce is +# randomly generated per encryption. +# ``` + +# %% +from fair_data_access.encrypt import encrypt_file, generate_key + +KEYS_DIR = Path("keys") + +# Generate a random 256-bit symmetric key +dataset_key = generate_key() + +# Encrypt the CSV — produces a .enc file alongside the original +encrypted_path, _ = encrypt_file(str(DATASET_PATH), key=dataset_key) +encrypted_path = Path(encrypted_path) + +print(f"Original: {DATASET_PATH} ({DATASET_PATH.stat().st_size:,} bytes)") +print(f"Encrypted: {encrypted_path} ({encrypted_path.stat().st_size:,} bytes)") +print() +print("The encrypted file is safe to host publicly.") +print("Without the dataset key, it is computationally infeasible to decrypt.") + +# %% [markdown] +# ## Step 3 — Create the ODRL access policy +# +# The policy declares, in machine-readable form, **who** can do **what** with +# this dataset, under **which conditions**. It uses the +# [ODRL](https://www.w3.org/TR/odrl-model/) (Open Digital Rights Language) +# standard with purpose constraints from the +# [W3C Data Privacy Vocabulary](https://w3id.org/dpv). +# +# This policy says: +# - ✅ **Permitted:** Use and Reproduce — for Public Benefit purposes +# - ❌ **Prohibited:** Commercialise and Sell +# - 📋 **Duty:** Attribute to FAIR2Adapt +# +# The published nanopublication of this policy is: +# [View on Science Live](https://platform.sciencelive4all.org/np/?uri=https://w3id.org/np/RATzaPLmaUtrmZ6w9WILh8jxF3F-e23xPrFHJQFO3-U6Y) +# +# ```{figure} images/sciencelive-odrl-policy-view.png +# :alt: ODRL Policy rendered in Science Live +# :width: 500px +# +# The same policy rendered in the Science Live platform's ODRL viewer. +# ``` + +# %% +import json + +POLICY_PATH = Path("policies/example-policy.jsonld") +policy = json.loads(POLICY_PATH.read_text()) + +print("=== ODRL Access Policy ===") +print(json.dumps(policy, indent=2)) + +# %% [markdown] +# In production, this policy would be **published as a signed +# nanopublication** on the decentralised nanopub network, making it +# immutable, citable, and independently verifiable. +# +# ```{tip} +# You can create and publish ODRL policy nanopublications using the +# **Science Live platform** at +# [platform.sciencelive4all.org/np/create](https://platform.sciencelive4all.org/np/create) +# — select the "ODRL Access Policy" template for a guided, user-friendly +# form. +# ``` + +# %% [markdown] +# ## Step 4 — Receive an access request +# +# A consumer identifies themselves with a DID and declares their purpose. +# In production, this happens via a GitHub Issue (with automated evaluation). +# Here we simulate it by loading the example consumer's DID document. + +# %% +CONSUMER_DID = "did:web:fair2adapt.github.io:fair-data-access:example-consumer" +CONSUMER_DID_DOC_PATH = KEYS_DIR / "did" / "example-consumer.json" + +consumer_did_doc = json.loads(CONSUMER_DID_DOC_PATH.read_text()) + +print(f"Access request received from: {CONSUMER_DID}") +print(f"Declared purpose: Academic Research") +print(f"Consumer's public key curve: {consumer_did_doc['verificationMethod'][0]['publicKeyJwk']['crv']}") + +# %% [markdown] +# ## Step 5 — Evaluate the policy +# +# The provider (or an automated workflow) checks whether the consumer's +# declared purpose matches the policy's constraints. + +# %% +# In production, this is done by fair_data_access.policy.evaluate_policy(). +# Here we check manually for clarity. + +declared_purpose = "https://w3id.org/dpv#AcademicResearch" +allowed_purposes = [ + c["rightOperand"]["@id"] + for p in policy["permission"] + for c in p.get("constraint", []) +] + +if declared_purpose in allowed_purposes: + print(f"✅ PERMIT — purpose '{declared_purpose.split('#')[1]}' matches policy constraint") +else: + print(f"❌ DENY — purpose not allowed by policy") + +# %% [markdown] +# ## Step 6 — Wrap the dataset key for the consumer +# +# The provider wraps the dataset key using the consumer's public key via +# ECDH key agreement + AES-GCM. Only the holder of the consumer's private +# key can unwrap it. +# +# ```{note} +# The dataset key itself is **never transmitted in plain form**. It is +# encrypted specifically for the consumer's public key using an ephemeral +# ECDH shared secret. Even the provider cannot recover the key from the +# wrapped envelope without the consumer's private key. +# ``` + +# %% +from fair_data_access.did import get_public_key_pem +from fair_data_access.keys import wrap_key, save_wrapped_key + +# Extract the consumer's public key from their DID document +consumer_public_pem = get_public_key_pem(consumer_did_doc) + +# Wrap the dataset key for this specific consumer +wrapped_envelope = wrap_key(dataset_key, consumer_public_pem) + +# Save the wrapped key (in production, this goes to GitHub Pages or S3) +WRAPPED_KEY_PATH = KEYS_DIR / "wrapped-dataset-key.json" +save_wrapped_key(wrapped_envelope, str(WRAPPED_KEY_PATH)) + +print(f"Wrapped key saved to: {WRAPPED_KEY_PATH}") +print(f"Wrapped envelope size: {len(wrapped_envelope)} bytes") +print() +print("This file is safe to host publicly — only the consumer's private key") +print("can unwrap it.") + +# %% [markdown] +# ## Step 7 — Publish the access grant +# +# The grant is a signed nanopublication recording: who was granted access, +# to which dataset, under which policy, and when. It is the **auditable +# proof** of the access decision. +# +# ```{tip} +# You can create and publish access grants using the **Science Live +# platform** — select the "ODRL Access Grant" template, or use the +# automated GitHub Actions workflow in production. +# ``` + +# %% +from datetime import datetime, timezone + +grant_record = { + "type": "odrl:Agreement", + "assignee": CONSUMER_DID, + "target": policy["permission"][0]["target"], + "policy_nanopub": "https://w3id.org/np/RATzaPLmaUtrmZ6w9WILh8jxF3F-e23xPrFHJQFO3-U6Y", # view: https://platform.sciencelive4all.org/np/?uri=https://w3id.org/np/RATzaPLmaUtrmZ6w9WILh8jxF3F-e23xPrFHJQFO3-U6Y + "granted_actions": ["use", "reproduce"], + "timestamp": datetime.now(timezone.utc).isoformat(), +} + +print("=== Access Grant Record ===") +print(json.dumps(grant_record, indent=2)) +print() +print("In production, this is published as a signed nanopublication.") +print("Anyone can verify that the grant was issued by the policy publisher.") + +# %% [markdown] +# ## Summary +# +# The provider has: +# +# | Step | Artefact | Where it lives | +# |------|----------|----------------| +# | Encrypt dataset | `synthetic-biodiversity-observations.csv.enc` | Public (S3, Zenodo, GitHub) | +# | Dataset key | 256-bit AES key | Provider's secret store (never shared directly) | +# | ODRL policy | `example-policy.jsonld` → nanopublication | Public (nanopub network) | +# | Wrapped key | `wrapped-dataset-key.json` | Public (GitHub Pages) | +# | Access grant | nanopublication | Public (nanopub network) | +# +# The dataset key exists in two forms: +# 1. **Plain** — stored privately by the provider +# 2. **Wrapped** — encrypted for a specific consumer, safe to publish +# +# **Next:** [Chapter 2 — Consumer: request access and decrypt](02_consumer.ipynb) diff --git a/examples/walkthrough/02_consumer.py b/examples/walkthrough/02_consumer.py new file mode 100644 index 0000000..60a11f6 --- /dev/null +++ b/examples/walkthrough/02_consumer.py @@ -0,0 +1,235 @@ +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: percent +# format_version: '1.3' +# jupytext_version: 1.16.0 +# kernelspec: +# display_name: Python 3 +# language: python +# name: python3 +# --- + +# %% [markdown] +# # Chapter 2 — Consumer: request access and decrypt +# +# This notebook walks through the **consumer side** of the FAIR2Adapt access +# control workflow. You will: +# +# 1. **Unwrap** the dataset key using your private key +# 2. **Decrypt** the dataset +# 3. **Verify** the access grant +# +# ```{admonition} Prerequisites +# :class: warning +# Run [Chapter 1 — Provider](01_provider.ipynb) first. It generates the +# encrypted dataset and wrapped key that this notebook consumes. +# ``` + +# %% [markdown] +# ## Step 1 — Load your private key +# +# Your private key was generated in [Chapter 0](00_setup_did.ipynb) and saved +# to `keys/example-consumer-private.pem`. It never leaves your machine. + +# %% +from pathlib import Path + +KEYS_DIR = Path("keys") +DATA_DIR = Path("data") + +CONSUMER_PRIVATE_KEY_PATH = KEYS_DIR / "example-consumer-private.pem" + +if not CONSUMER_PRIVATE_KEY_PATH.exists(): + print("ERROR: Consumer private key not found.") + print("Run 00_setup_did.ipynb first to generate it.") + raise FileNotFoundError(CONSUMER_PRIVATE_KEY_PATH) + +consumer_private_pem = CONSUMER_PRIVATE_KEY_PATH.read_bytes() +print(f"Loaded private key from: {CONSUMER_PRIVATE_KEY_PATH}") +print(f"Key size: {len(consumer_private_pem)} bytes") + +# %% [markdown] +# ## Step 2 — Download the wrapped key +# +# The provider published a wrapped dataset key specifically for your DID. +# In production, this would be at a URL like: +# +# ``` +# https://fair2adapt.github.io/fair-data-access/keys//dataset.key +# ``` +# +# Here we load it from the local file created by Chapter 1. + +# %% +from fair_data_access.keys import load_wrapped_key + +WRAPPED_KEY_PATH = KEYS_DIR / "wrapped-dataset-key.json" + +if not WRAPPED_KEY_PATH.exists(): + print("ERROR: Wrapped key not found.") + print("Run 01_provider.ipynb first to generate it.") + raise FileNotFoundError(WRAPPED_KEY_PATH) + +wrapped_envelope = load_wrapped_key(str(WRAPPED_KEY_PATH)) +print(f"Loaded wrapped key from: {WRAPPED_KEY_PATH}") +print(f"Envelope size: {len(wrapped_envelope)} bytes") + +# %% [markdown] +# ## Step 3 — Unwrap the dataset key +# +# Using ECDH key agreement: the wrapped envelope contains an **ephemeral +# public key** from the provider. Your private key + that ephemeral public +# key produce a shared secret, which decrypts the dataset key. +# +# ```{note} +# This is the critical step that requires your **private key**. Nobody else +# can perform this — the wrapping was done specifically for your public key. +# ``` + +# %% +from fair_data_access.keys import unwrap_key + +dataset_key = unwrap_key(wrapped_envelope, consumer_private_pem) + +print(f"Dataset key recovered: {len(dataset_key) * 8}-bit AES key") +print(f"Key (hex): {dataset_key.hex()[:16]}...{dataset_key.hex()[-4:]}") + +# %% [markdown] +# ## Step 4 — Decrypt the dataset +# +# With the recovered dataset key, decrypt the `.enc` file to get back the +# original CSV. + +# %% +from fair_data_access.encrypt import decrypt_file + +ENCRYPTED_PATH = DATA_DIR / "synthetic-biodiversity-observations.csv.enc" + +if not ENCRYPTED_PATH.exists(): + print("ERROR: Encrypted dataset not found.") + print("Run 01_provider.ipynb first to generate it.") + raise FileNotFoundError(ENCRYPTED_PATH) + +decrypted_path = decrypt_file(str(ENCRYPTED_PATH), key=dataset_key) +print(f"Decrypted: {decrypted_path}") + +# %% [markdown] +# ## Step 5 — Verify the data +# +# Load the decrypted CSV and confirm it matches the original synthetic +# dataset. + +# %% +import pandas as pd + +df = pd.read_csv(decrypted_path, comment="#") +print(f"Rows: {len(df)}, Columns: {list(df.columns)}") +print() +print("=== First 5 rows ===") +df.head() + +# %% +# Quick integrity check: compare with the original +original = pd.read_csv( + DATA_DIR / "synthetic-biodiversity-observations.csv", comment="#" +) + +assert len(df) == len(original), "Row count mismatch!" +assert list(df.columns) == list(original.columns), "Column mismatch!" +assert df.equals(original), "Data mismatch!" + +print(f"✅ Integrity verified — {len(df)} rows match the original exactly.") + +# %% [markdown] +# ## Step 6 — Verify the access grant (audit trail) +# +# In production, the access grant is a signed nanopublication on the +# decentralised nanopub network. Anyone can independently verify: +# +# 1. **The grant exists** — query the nanopub network by dataset URI + consumer DID +# 2. **The signature is valid** — the grant was signed by the same ORCID that +# published the policy +# 3. **The policy was followed** — the grant references the policy nanopub +# +# ```{tip} +# You can view ODRL access grants in a user-friendly format on the +# **Science Live platform**: +# - [View this walkthrough's grant](https://platform.sciencelive4all.org/np/?uri=https://w3id.org/np/RARNOf26WWMYa0BkLWpMURNRqjwSVGXj-4A9r9iCBpruM) +# - [View the ODRL policy it references](https://platform.sciencelive4all.org/np/?uri=https://w3id.org/np/RATzaPLmaUtrmZ6w9WILh8jxF3F-e23xPrFHJQFO3-U6Y) +# +# The custom viewer renders each nanopub as a readable card with dataset, +# assignee, permitted actions, and policy reference. +# ``` +# +# ```{figure} images/sciencelive-odrl-grant-view.png +# :alt: ODRL Access Grant rendered in Science Live +# :width: 500px +# +# The access grant as rendered by the Science Live platform. +# ``` + +# %% +import json + +# In production, this would be: +# from fair_data_access.grant import verify_access +# result = verify_access(dataset_uri, consumer_did, policy_nanopub_uri) + +# For the walkthrough, we show what a verified grant looks like: +verified_grant = { + "status": "GRANTED", + "dataset": "https://fair2adapt.eu/data/walkthrough-biodiversity", + "consumer_did": "did:web:fair2adapt.github.io:fair-data-access:example-consumer", + "policy_nanopub": "https://w3id.org/np/RATzaPLmaUtrmZ6w9WILh8jxF3F-e23xPrFHJQFO3-U6Y", # view: https://platform.sciencelive4all.org/np/?uri=https://w3id.org/np/RATzaPLmaUtrmZ6w9WILh8jxF3F-e23xPrFHJQFO3-U6Y + "grant_nanopub": "https://w3id.org/np/RARNOf26WWMYa0BkLWpMURNRqjwSVGXj-4A9r9iCBpruM", # view: https://platform.sciencelive4all.org/np/?uri=https://w3id.org/np/RARNOf26WWMYa0BkLWpMURNRqjwSVGXj-4A9r9iCBpruM + "signature_valid": True, + "publisher_matches_policy": True, +} + +print("=== Access Grant Verification ===") +print(json.dumps(verified_grant, indent=2)) + +# %% [markdown] +# ## Summary +# +# The consumer has: +# +# | Step | What happened | Trust guarantee | +# |------|--------------|-----------------| +# | Unwrap key | Recovered the dataset key from the wrapped envelope | Only your private key could do this | +# | Decrypt | Got back the original dataset from the .enc file | AES-GCM integrity check passed | +# | Verify grant | Confirmed the grant nanopub exists and is properly signed | Cryptographic signature verification | +# +# The complete chain of trust: +# +# ``` +# ODRL Policy (signed nanopub) +# ↓ evaluated against +# Access Request (consumer DID + purpose) +# ↓ results in +# Access Grant (signed nanopub + wrapped key) +# ↓ unwrapped by consumer +# Dataset Key +# ↓ decrypts +# Original Dataset ✓ +# ``` +# +# Every link in this chain is cryptographically verifiable and publicly +# auditable, while the dataset itself remains protected. +# +# --- +# +# ## What's next? +# +# - **Adapt to your data:** Replace `data/synthetic-biodiversity-observations.csv` +# with your own file and re-run `01_provider.ipynb`. No code changes needed. +# - **Publish for real:** Use the Science Live platform to create the ODRL +# policy and access grant as signed nanopublications. +# - **Set up your own DID:** Follow [Chapter 0](00_setup_did.ipynb) to +# publish your identity via GitHub Pages. +# - **Automate:** See the +# [GitHub Actions workflow](https://github.com/FAIR2Adapt/fair-data-access/blob/main/.github/workflows/access-request.yml) +# for fully automated policy evaluation and key wrapping. diff --git a/examples/walkthrough/README.md b/examples/walkthrough/README.md new file mode 100644 index 0000000..8a9f2e7 --- /dev/null +++ b/examples/walkthrough/README.md @@ -0,0 +1,101 @@ +# FAIR2Adapt walkthrough — ODRL access control end-to-end + +A reproducible walkthrough of the **FAIR2Adapt access control framework**. +Shows how a real ODRL policy nanopublication protects a dataset, and how +a researcher requests access and decrypts the data using their DID. + +> **The framework is production-ready. Only the dataset is synthetic.** +> Everything in this walkthrough uses the actual FAIR2Adapt encryption, +> key-wrapping, DID resolution, and nanopublication signing pipelines — +> the same code paths used for the Hamburg pluvial flood risk dataset. +> The biodiversity observations in +> [`data/synthetic-biodiversity-observations.csv`](data/README.md) are +> fabricated so the example is fully reproducible without licensing or +> privacy concerns. To adapt this walkthrough to real data, replace the +> dataset file and update the policy URI — no code changes needed. + +## What this walkthrough shows + +1. How to establish a **decentralized identity (DID)** for a researcher +2. How a **provider** encrypts a dataset and publishes a machine-readable + access policy as a signed nanopublication +3. How a **consumer** requests access and decrypts the data using their DID +4. How every step produces a signed, auditable nanopublication + +## Run it + +The walkthrough is a Jupyter-book with three notebooks, paired to Python +scripts via [jupytext](https://jupytext.readthedocs.io/) so only the `.py` +files are committed. Regenerate the `.ipynb` files on first run: + +```bash +pip install -e .[dev] jupytext jupyter-book +cd examples/walkthrough +jupytext --sync *.py +jupyter lab +``` + +Then open the notebooks in order: + +1. **[`00_setup_did.ipynb`](00_setup_did.py)** — Generate your own DID, one + time (optional for reading the walkthrough but required to go beyond it) +2. **[`01_provider.ipynb`](01_provider.py)** — Encrypt the dataset, publish + the ODRL policy, wrap the dataset key for a specific consumer DID +3. **[`02_consumer.ipynb`](02_consumer.py)** — Receive the grant, unwrap the + key, decrypt the data + +Or run them all at once: + +```bash +jupyter nbconvert --execute --to notebook *.ipynb +``` + +## Build as a book + +```bash +jupyter-book build . +``` + +The rendered HTML lands in `_build/html`. + +## File layout + +``` +examples/walkthrough/ +├── _config.yml # Jupyter-book config +├── _toc.yml # Table of contents +├── README.md # This file +├── intro.md # Book intro page +├── 00_setup_did.py # (jupytext pair) +├── 01_provider.py # (jupytext pair) +├── 02_consumer.py # (jupytext pair) +├── data/ +│ ├── synthetic-biodiversity-observations.csv +│ └── README.md +├── keys/ +│ ├── example-provider-public.pem # Committed — provider identity +│ ├── example-consumer-public.pem # Committed — consumer identity +│ ├── did/ +│ │ ├── example-provider.json # Committed — DID document +│ │ └── example-consumer.json # Committed — DID document +│ └── README.md # Why private keys are NOT here +└── policies/ + └── example-policy.jsonld # Example generated ODRL policy +``` + +## Private keys + +**Private keys are never in this repository.** The notebooks generate a +throwaway private key on first run and save it locally to the gitignored +`keys/*_private.pem`. Delete it anytime — the walkthrough is idempotent. + +If you want to go beyond the walkthrough and use this framework for real +research data, see `00_setup_did.ipynb` for how to publish your own DID +document to GitHub Pages (or any web server you control). + +## Using your own data + +Replace `data/synthetic-biodiversity-observations.csv` with your dataset, +update `datasetUri` in `policies/example-policy.jsonld` (or regenerate it +in `01_provider.ipynb`), and re-run the notebooks. The workflow is +data-agnostic — it encrypts and gates access to whatever file you give it. diff --git a/examples/walkthrough/_config.yml b/examples/walkthrough/_config.yml new file mode 100644 index 0000000..2f33d34 --- /dev/null +++ b/examples/walkthrough/_config.yml @@ -0,0 +1,28 @@ +title: FAIR2Adapt public demo — ODRL access control +author: FAIR2Adapt +copyright: "2026" +logo: "" + +only_build_toc_files: true + +execute: + execute_notebooks: "force" + timeout: 120 + allow_errors: false + +html: + use_issues_button: true + use_repository_button: true + use_edit_page_button: true + +repository: + url: https://github.com/FAIR2Adapt/fair-data-access + path_to_book: examples/public-demo + branch: main + +sphinx: + config: + html_show_copyright: false + html_theme_options: + repository_url: https://github.com/FAIR2Adapt/fair-data-access + use_repository_button: true diff --git a/examples/walkthrough/_toc.yml b/examples/walkthrough/_toc.yml new file mode 100644 index 0000000..ed3c656 --- /dev/null +++ b/examples/walkthrough/_toc.yml @@ -0,0 +1,6 @@ +format: jb-book +root: intro +chapters: + - file: 00_setup_did + - file: 01_provider + - file: 02_consumer diff --git a/examples/walkthrough/data/README.md b/examples/walkthrough/data/README.md new file mode 100644 index 0000000..3b44d54 --- /dev/null +++ b/examples/walkthrough/data/README.md @@ -0,0 +1,63 @@ +# Demo biodiversity observations + +**This is synthetic demo data.** It is not real biodiversity data from any +actual survey. It is designed to look realistic enough to be a meaningful +demonstration of FAIR2Adapt's access-control workflow, but every row is +fabricated. + +## What it represents + +A small Mediterranean coastal biodiversity survey, with 20 observations of +species across three habitats (seagrass meadow, rocky reef/seabed, coastal +open water). The location (near Marseille, France) and species are plausible +for the region but no observations are real. + +## Why synthetic? + +The FAIR2Adapt public demo needs a dataset that: + +1. **Can be distributed freely** — no licensing, privacy, or rights concerns +2. **Is small** — fits in git, easy to download, quick to encrypt/decrypt +3. **Is clearly labeled as demo** — avoids any confusion with real research data +4. **Has a plausible shape** — so the access control workflow looks realistic + +Synthetic data meets all four requirements without requiring us to curate a +public dataset and track its licence terms. + +## Parsing note + +The CSV starts with comment lines prefixed with `#` that reinforce the +synthetic nature of the data. Most parsers read these as data rows by +default. To skip them, pass a comment argument: + +```python +import pandas as pd +df = pd.read_csv("biodiversity-observations.csv", comment="#") +``` + +```bash +csvkit: csvlook --skip-lines 4 biodiversity-observations.csv +``` + +## Columns + +| Column | Description | +| --- | --- | +| `observation_id` | Unique ID (`obs-NNN`) | +| `scientific_name` | Scientific species name | +| `common_name` | Common species name | +| `latitude` | Observation latitude (decimal degrees) | +| `longitude` | Observation longitude (decimal degrees) | +| `observation_date` | YYYY-MM-DD | +| `observer_id` | Anonymised observer (`obs-A`, `obs-B`, `obs-C`) | +| `count` | Number of individuals observed | +| `habitat` | Habitat type | +| `notes` | Free-text observation notes | + +## Using real data instead + +If you want to adapt this demo for real data, replace `biodiversity-observations.csv` +with your own file and update the `datasetUri` in `../policies/demo_policy.jsonld` +to point to the real dataset's URI. The access-control workflow is agnostic +to the data's shape — it encrypts and gates access to whatever file you +give it. diff --git a/examples/walkthrough/data/synthetic-biodiversity-observations.csv b/examples/walkthrough/data/synthetic-biodiversity-observations.csv new file mode 100644 index 0000000..6e87bd5 --- /dev/null +++ b/examples/walkthrough/data/synthetic-biodiversity-observations.csv @@ -0,0 +1,26 @@ +# SYNTHETIC DEMO DATA — NOT REAL OBSERVATIONS +# Generated for the FAIR2Adapt public demo. Every row is fabricated. +# Species and locations are plausible for the Mediterranean coast but +# no actual survey was conducted. Do not cite or use for research. +# See ./README.md for details. +observation_id,scientific_name,common_name,latitude,longitude,observation_date,observer_id,count,habitat,notes +demo-obs-001,Posidonia oceanica,Neptune grass,43.2965,5.3698,2026-03-15,demo-observer-A,1,seagrass_meadow,synthetic demo record — healthy patch ~200sqm +demo-obs-002,Caretta caretta,Loggerhead sea turtle,43.2978,5.3712,2026-03-15,demo-observer-A,2,coastal_open_water,synthetic demo record — adult females +demo-obs-003,Posidonia oceanica,Neptune grass,43.3015,5.3755,2026-03-16,demo-observer-A,1,seagrass_meadow,synthetic demo record — fragmented +demo-obs-004,Pinna nobilis,Noble pen shell,43.2989,5.3701,2026-03-16,demo-observer-B,4,rocky_seabed,synthetic demo record — juveniles +demo-obs-005,Epinephelus marginatus,Dusky grouper,43.3022,5.3760,2026-03-17,demo-observer-B,1,rocky_reef,synthetic demo record — adult ~80cm +demo-obs-006,Tursiops truncatus,Bottlenose dolphin,43.3101,5.3820,2026-03-17,demo-observer-C,7,coastal_open_water,synthetic demo record — pod with 2 calves +demo-obs-007,Paracentrotus lividus,Purple sea urchin,43.2991,5.3702,2026-03-18,demo-observer-B,42,rocky_intertidal,synthetic demo record — high density quadrat +demo-obs-008,Pinna nobilis,Noble pen shell,43.3004,5.3728,2026-03-18,demo-observer-B,2,rocky_seabed,synthetic demo record — adults with epiphytes +demo-obs-009,Posidonia oceanica,Neptune grass,43.2958,5.3682,2026-03-19,demo-observer-A,1,seagrass_meadow,synthetic demo record — reference control site +demo-obs-010,Carcharhinus plumbeus,Sandbar shark,43.3150,5.3901,2026-03-19,demo-observer-C,1,coastal_open_water,synthetic demo record — passing individual +demo-obs-011,Diplodus sargus,White seabream,43.3015,5.3755,2026-03-20,demo-observer-B,15,rocky_reef,synthetic demo record — juvenile school +demo-obs-012,Paracentrotus lividus,Purple sea urchin,43.3001,5.3715,2026-03-20,demo-observer-B,28,rocky_intertidal,synthetic demo record — quadrat count +demo-obs-013,Caretta caretta,Loggerhead sea turtle,43.2980,5.3710,2026-03-21,demo-observer-A,1,coastal_open_water,synthetic demo record — feeding on jellyfish +demo-obs-014,Epinephelus marginatus,Dusky grouper,43.3018,5.3762,2026-03-21,demo-observer-B,1,rocky_reef,synthetic demo record +demo-obs-015,Posidonia oceanica,Neptune grass,43.2972,5.3694,2026-03-22,demo-observer-A,1,seagrass_meadow,synthetic demo record — extensive meadow +demo-obs-016,Dentex dentex,Common dentex,43.3045,5.3785,2026-03-22,demo-observer-B,3,rocky_reef,synthetic demo record — hunting group +demo-obs-017,Pinna nobilis,Noble pen shell,43.2996,5.3705,2026-03-23,demo-observer-B,1,rocky_seabed,synthetic demo record — dead shell +demo-obs-018,Tursiops truncatus,Bottlenose dolphin,43.3120,5.3855,2026-03-23,demo-observer-C,5,coastal_open_water,synthetic demo record — pod +demo-obs-019,Diplodus sargus,White seabream,43.3019,5.3758,2026-03-24,demo-observer-B,22,rocky_reef,synthetic demo record — mixed size class +demo-obs-020,Paracentrotus lividus,Purple sea urchin,43.3005,5.3720,2026-03-24,demo-observer-B,35,rocky_intertidal,synthetic demo record — quadrat count diff --git a/examples/walkthrough/images/sciencelive-odrl-grant-view.png b/examples/walkthrough/images/sciencelive-odrl-grant-view.png new file mode 100644 index 0000000..6787873 Binary files /dev/null and b/examples/walkthrough/images/sciencelive-odrl-grant-view.png differ diff --git a/examples/walkthrough/images/sciencelive-odrl-policy-create.png b/examples/walkthrough/images/sciencelive-odrl-policy-create.png new file mode 100644 index 0000000..6b9bf6c Binary files /dev/null and b/examples/walkthrough/images/sciencelive-odrl-policy-create.png differ diff --git a/examples/walkthrough/images/sciencelive-odrl-policy-view.png b/examples/walkthrough/images/sciencelive-odrl-policy-view.png new file mode 100644 index 0000000..27ae48a Binary files /dev/null and b/examples/walkthrough/images/sciencelive-odrl-policy-view.png differ diff --git a/examples/walkthrough/intro.md b/examples/walkthrough/intro.md new file mode 100644 index 0000000..ed5c6d5 --- /dev/null +++ b/examples/walkthrough/intro.md @@ -0,0 +1,79 @@ +# FAIR data access control — a reproducible walkthrough + +This walkthrough demonstrates how the **FAIR2Adapt access control framework** +protects research data using machine-readable ODRL policies published as +signed nanopublications. + +## The framework is production-ready + +Everything here uses the same code, the same encryption, and the same +nanopublication infrastructure that protects the Hamburg urban pluvial flood +risk dataset in FAIR2Adapt. Only the dataset is synthetic — so the walkthrough +is fully reproducible without licensing or privacy concerns. + +## What you will learn + +```{admonition} Chapter 0 — Set up your identity +:class: tip +Generate a decentralised identifier (DID) — a cryptographic identity that +stays with you across institutions. You do this once and reuse it for every +dataset request you ever make. +``` + +```{admonition} Chapter 1 — Provider: encrypt and publish a policy +:class: note +Encrypt a dataset with AES-256-GCM, publish an ODRL access policy as a signed +nanopublication, and wrap the dataset key for a specific consumer's DID. +``` + +```{admonition} Chapter 2 — Consumer: request access and decrypt +:class: note +Receive a cryptographically wrapped key, unwrap it with your private key, +and decrypt the dataset. Verify the access grant nanopublication for audit. +``` + +## How ODRL access control works + +``` +Provider Consumer +──────── ──────── +1. Encrypt dataset 1. Generate DID keypair (once) +2. Publish ODRL policy nanopub 2. Publish DID document +3. Store dataset key privately 3. Request access (with DID + purpose) + │ +4. Evaluate policy ← ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +5. Wrap dataset key for consumer's + public key (ECDH + AES-GCM) +6. Publish access grant nanopub +7. Serve wrapped key 4. Download wrapped key + 5. Unwrap with private key + 6. Decrypt dataset + 7. Use the data ✓ +``` + +Every step that produces a nanopublication creates a **signed, immutable, +auditable record** on the decentralised nanopub network. The provider can +prove they published the policy. The consumer can prove they were granted +access. Anyone can verify the chain. + +## Adapting this to your own data + +This walkthrough uses a synthetic biodiversity dataset, but the framework is +data-agnostic. To protect your own research data: + +1. Replace the CSV with your file +2. Change the `datasetUri` in the ODRL policy +3. Re-run the notebooks + +No code changes needed. The encryption, key wrapping, DID resolution, and +nanopublication signing all work identically regardless of what data you +protect. + +## Prerequisites + +```bash +pip install fair-data-access jupytext jupyter-book +``` + +You also need Python ≥ 3.12 and a working internet connection (for DID +resolution and nanopub network queries, if you publish for real). diff --git a/examples/walkthrough/keys/.gitignore b/examples/walkthrough/keys/.gitignore new file mode 100644 index 0000000..ce62c5b --- /dev/null +++ b/examples/walkthrough/keys/.gitignore @@ -0,0 +1,2 @@ +# Private keys are NEVER committed — generated locally on first notebook run +*_private.pem diff --git a/examples/walkthrough/keys/README.md b/examples/walkthrough/keys/README.md new file mode 100644 index 0000000..ecdfcd2 --- /dev/null +++ b/examples/walkthrough/keys/README.md @@ -0,0 +1,31 @@ +# Cryptographic keys for the walkthrough + +## What is committed + +| File | Purpose | +|------|---------| +| `example-provider-public.pem` | Provider's EC P-256 public key (for verifying grants) | +| `example-consumer-public.pem` | Consumer's EC P-256 public key (for wrapping dataset keys) | +| `did/example-provider.json` | Provider DID document (JSON-LD, served via GitHub Pages) | +| `did/example-consumer.json` | Consumer DID document (JSON-LD, served via GitHub Pages) | + +## What is NOT committed + +| File | Why | +|------|-----| +| `*_private.pem` | Private keys are generated on first notebook run and gitignored. Anyone with a private key can impersonate the corresponding identity. | + +## These are walkthrough keys + +These keys are for the walkthrough only. They correspond to two synthetic +identities used in the example: + +- `did:web:fair2adapt.github.io:fair-data-access:example-provider` +- `did:web:fair2adapt.github.io:fair-data-access:example-consumer` + +**Do not use these keys for real data.** Generate your own keypair using +`00_setup_did.ipynb` or the CLI: + +```bash +fair-data-access keygen -d ~/.fair-data-access/ +``` diff --git a/examples/walkthrough/keys/did/example-consumer.json b/examples/walkthrough/keys/did/example-consumer.json new file mode 100644 index 0000000..2f49e75 --- /dev/null +++ b/examples/walkthrough/keys/did/example-consumer.json @@ -0,0 +1,26 @@ +{ + "@context": [ + "https://www.w3.org/ns/did/v1", + "https://w3id.org/security/suites/jws-2020/v1" + ], + "id": "did:web:fair2adapt.github.io:fair-data-access:example-consumer", + "verificationMethod": [ + { + "id": "did:web:fair2adapt.github.io:fair-data-access:example-consumer#key-1", + "type": "JsonWebKey2020", + "controller": "did:web:fair2adapt.github.io:fair-data-access:example-consumer", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256", + "x": "2OyUBGxbSw4NaV_-XT5qTQwZSIAdA4hFYxiUIiWOTXw", + "y": "hsn7gogSYPnx_6zWXxzMYE9nhSvJJDqdkfWHBTUbmrM" + } + } + ], + "authentication": [ + "did:web:fair2adapt.github.io:fair-data-access:example-consumer#key-1" + ], + "assertionMethod": [ + "did:web:fair2adapt.github.io:fair-data-access:example-consumer#key-1" + ] +} \ No newline at end of file diff --git a/examples/walkthrough/keys/did/example-provider.json b/examples/walkthrough/keys/did/example-provider.json new file mode 100644 index 0000000..d3d08d6 --- /dev/null +++ b/examples/walkthrough/keys/did/example-provider.json @@ -0,0 +1,26 @@ +{ + "@context": [ + "https://www.w3.org/ns/did/v1", + "https://w3id.org/security/suites/jws-2020/v1" + ], + "id": "did:web:fair2adapt.github.io:fair-data-access:example-provider", + "verificationMethod": [ + { + "id": "did:web:fair2adapt.github.io:fair-data-access:example-provider#key-1", + "type": "JsonWebKey2020", + "controller": "did:web:fair2adapt.github.io:fair-data-access:example-provider", + "publicKeyJwk": { + "kty": "EC", + "crv": "P-256", + "x": "wIHNjZ8SIfY2C-UwoWPJNmVbAlGvtiSqJ-gjyzluXyU", + "y": "Jx1oXSaJHvLmfaR4K0fYfU55v6sxOP8VNBAQAMT6PAg" + } + } + ], + "authentication": [ + "did:web:fair2adapt.github.io:fair-data-access:example-provider#key-1" + ], + "assertionMethod": [ + "did:web:fair2adapt.github.io:fair-data-access:example-provider#key-1" + ] +} \ No newline at end of file diff --git a/examples/walkthrough/keys/example-consumer-public.pem b/examples/walkthrough/keys/example-consumer-public.pem new file mode 100644 index 0000000..f62e44d --- /dev/null +++ b/examples/walkthrough/keys/example-consumer-public.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2OyUBGxbSw4NaV/+XT5qTQwZSIAd +A4hFYxiUIiWOTXyGyfuCiBJg+fH/rNZfHMxgT2eFK8kkOp2R9YcFNRuasw== +-----END PUBLIC KEY----- diff --git a/examples/walkthrough/keys/example-provider-public.pem b/examples/walkthrough/keys/example-provider-public.pem new file mode 100644 index 0000000..9841395 --- /dev/null +++ b/examples/walkthrough/keys/example-provider-public.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEwIHNjZ8SIfY2C+UwoWPJNmVbAlGv +tiSqJ+gjyzluXyUnHWhdJoke8uZ9pHgrR9h9Tnm/qzE4/xU0EBAAxPo8CA== +-----END PUBLIC KEY----- diff --git a/examples/walkthrough/policies/example-policy.jsonld b/examples/walkthrough/policies/example-policy.jsonld new file mode 100644 index 0000000..7c3f2a2 --- /dev/null +++ b/examples/walkthrough/policies/example-policy.jsonld @@ -0,0 +1,53 @@ +{ + "@context": "http://www.w3.org/ns/odrl.jsonld", + "@type": "Offer", + "uid": "https://fair2adapt.eu/policy/walkthrough-biodiversity", + "_nanopub_uri": "https://w3id.org/np/RATzaPLmaUtrmZ6w9WILh8jxF3F-e23xPrFHJQFO3-U6Y", + "_note": "The nanopub_uri above is the published nanopublication of this policy on the nanopub network.", + "permission": [ + { + "target": "https://fair2adapt.eu/data/walkthrough-biodiversity", + "action": "use", + "constraint": [ + { + "leftOperand": "purpose", + "operator": "eq", + "rightOperand": { + "@id": "https://w3id.org/dpv#PublicBenefit" + } + } + ] + }, + { + "target": "https://fair2adapt.eu/data/walkthrough-biodiversity", + "action": "reproduce", + "constraint": [ + { + "leftOperand": "purpose", + "operator": "eq", + "rightOperand": { + "@id": "https://w3id.org/dpv#PublicBenefit" + } + } + ] + } + ], + "prohibition": [ + { + "target": "https://fair2adapt.eu/data/walkthrough-biodiversity", + "action": "commercialize" + }, + { + "target": "https://fair2adapt.eu/data/walkthrough-biodiversity", + "action": "sell" + } + ], + "obligation": [ + { + "action": "attribute", + "attributedParty": { + "@id": "https://fair2adapt-eosc.eu" + } + } + ] +}