diff --git a/.changeset/config.json b/.changeset/config.json index cc95104ae..3cc2d41b1 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -1,5 +1,5 @@ { - "$schema": "https://unpkg.com/@changesets/config@3.1.1/schema.json", + "$schema": "https://unpkg.com/@changesets/config@3.1.2/schema.json", "changelog": [ "@svitejs/changesets-changelog-github-compact", { "repo": "TanStack/db" } @@ -10,5 +10,8 @@ "updateInternalDependencies": "patch", "fixed": [], "linked": [], - "ignore": [] + "ignore": [], + "___experimentalUnsafeOptions_WILL_CHANGE_IN_PATCH": { + "onlyUpdatePeerDependentsWhenOutOfRange": true + } } diff --git a/.changeset/in-memory-fallback-for-ssr.md b/.changeset/in-memory-fallback-for-ssr.md deleted file mode 100644 index 5afb61018..000000000 --- a/.changeset/in-memory-fallback-for-ssr.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"@tanstack/db": patch ---- - -Add in-memory fallback for localStorage collections in SSR environments - -Prevents errors when localStorage collections are imported on the server by automatically falling back to an in-memory store. This allows isomorphic JavaScript applications to safely import localStorage collection modules without errors during module initialization. - -When localStorage is not available (e.g., in server-side rendering environments), the collection automatically uses an in-memory storage implementation. Data will not persist across page reloads or be shared across tabs when using the in-memory fallback, but the collection will function normally otherwise. 
- -Fixes #691 diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 000000000..f3509559e --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,5 @@ +{ + "permissions": { + "allow": ["Bash(git checkout:*)", "Bash(npx sherif:*)"] + } +} diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 8fbf8f760..b39f31408 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,9 +1,9 @@ --- name: 🐛 Bug Report about: Create a report to help us improve -title: "" -labels: "" -assignees: "" +title: '' +labels: '' +assignees: '' --- - [ ] I've validated the bug against the latest version of DB packages diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml index 933a6edc4..e98a12985 100644 --- a/.github/workflows/autofix.yml +++ b/.github/workflows/autofix.yml @@ -18,14 +18,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5.0.0 + uses: actions/checkout@v6.0.1 with: fetch-depth: 0 - name: Setup Tools uses: tanstack/config/.github/setup@main - name: Fix formatting - run: pnpm prettier --ignore-unknown . 
--check + run: pnpm format - name: Apply fixes uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27 with: - commit-message: "ci: apply automated fixes" + commit-message: 'ci: apply automated fixes' diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml new file mode 100644 index 000000000..c4bb5e220 --- /dev/null +++ b/.github/workflows/claude.yml @@ -0,0 +1,27 @@ +name: Claude Code +on: + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] +jobs: + claude: + if: | + (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude') && (github.event.comment.user.login == 'kevin-dp' || github.event.comment.user.login == 'KyleAMathews' || github.event.comment.user.login == 'samwillis')) || + (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude') && (github.event.comment.user.login == 'kevin-dp' || github.event.comment.user.login == 'KyleAMathews' || github.event.comment.user.login == 'samwillis')) + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + issues: write + id-token: write + actions: read # Required for Claude to read CI results on PRs + steps: + - name: Checkout code + uses: actions/checkout@v6.0.1 + with: + fetch-depth: 0 # required for Claude Code + - uses: anthropics/claude-code-action@v1 + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + # Responds to @claude mentions in comments diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml new file mode 100644 index 000000000..386636175 --- /dev/null +++ b/.github/workflows/e2e-tests.yml @@ -0,0 +1,77 @@ +name: E2E Tests + +on: + push: + branches: [main] + pull_request: + +jobs: + e2e-tests: + name: Run E2E Tests + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout code + uses: actions/checkout@v6.0.1 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.27.0 + + - name: 
Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Start Docker services + run: | + cd packages/db-collection-e2e/docker + docker compose up -d + echo "Waiting for services to be healthy..." + timeout 60 bash -c 'until docker compose ps | grep -q "healthy"; do sleep 2; done' + + - name: Build packages + run: | + pnpm --filter @tanstack/db-ivm build + pnpm --filter @tanstack/db build + pnpm --filter @tanstack/electric-db-collection build + pnpm --filter @tanstack/query-db-collection build + + - name: Run Electric E2E tests + run: | + cd packages/electric-db-collection + pnpm test:e2e + env: + ELECTRIC_URL: http://localhost:3000 + POSTGRES_HOST: localhost + POSTGRES_PORT: 54321 + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + POSTGRES_DB: e2e_test + + - name: Run Query E2E tests + run: | + cd packages/query-db-collection + pnpm test:e2e + env: + ELECTRIC_URL: http://localhost:3000 + + - name: Stop Docker services + if: always() + run: | + cd packages/db-collection-e2e/docker + docker compose down -v + + - name: Upload test results + if: failure() + uses: actions/upload-artifact@v4 + with: + name: test-results + path: packages/db-collection-e2e/junit/ + retention-days: 7 diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 472b8ec28..64525c50b 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -23,23 +23,37 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5.0.0 + uses: actions/checkout@v6.0.1 with: fetch-depth: 0 - name: Setup Tools uses: tanstack/config/.github/setup@main - name: Get base and head commits for `nx affected` - uses: nrwl/nx-set-shas@v4.3.3 + uses: nrwl/nx-set-shas@v4.4.0 with: main-branch-name: main - name: Run Checks - run: pnpm run lint && pnpm run build && pnpm run test + run: pnpm run build && pnpm run test + - name: Check dependency versions + run: 
| + if ! pnpm run test:sherif; then + echo "" + echo "==========================================" + echo "❌ Sherif found dependency version mismatches" + echo "" + echo "To fix this locally, run:" + echo " pnpm exec sherif --fix -i zod" + echo "" + echo "Then commit the changes to package.json files." + echo "==========================================" + exit 1 + fi preview: name: Preview runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5.0.0 + uses: actions/checkout@v6.0.1 with: fetch-depth: 0 - name: Setup Tools @@ -51,21 +65,23 @@ jobs: - name: Compressed Size Action - DB Package uses: preactjs/compressed-size-action@v2 with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" - pattern: "./packages/db/dist/**/*.{js,mjs}" - comment-key: "db-package-size" + repo-token: '${{ secrets.GITHUB_TOKEN }}' + pattern: './packages/db/dist/**/*.{js,mjs}' + comment-key: 'db-package-size' + build-script: 'build:minified' - name: Compressed Size Action - React DB Package uses: preactjs/compressed-size-action@v2 with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" - pattern: "./packages/react-db/dist/**/*.{js,mjs}" - comment-key: "react-db-package-size" + repo-token: '${{ secrets.GITHUB_TOKEN }}' + pattern: './packages/react-db/dist/**/*.{js,mjs}' + comment-key: 'react-db-package-size' + build-script: 'build:minified' build-example: name: Build Example Site runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5.0.0 + uses: actions/checkout@v6.0.1 - name: Setup Tools uses: tanstack/config/.github/setup@main - name: Build Packages @@ -79,7 +95,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5.0.0 + uses: actions/checkout@v6.0.1 - name: Setup Tools uses: tanstack/config/.github/setup@main - name: Build Packages @@ -87,4 +103,5 @@ jobs: - name: Build Starter Site run: | cd examples/react/projects + cp .env.example .env pnpm build diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 
155b5ae7d..2e49cf139 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -23,7 +23,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5.0.0 + uses: actions/checkout@v6.0.1 with: fetch-depth: 0 - name: Setup Tools @@ -36,11 +36,36 @@ jobs: with: version: pnpm run changeset:version publish: pnpm run changeset:publish - commit: "ci: Version Packages" - title: "ci: Version Packages" + commit: 'ci: Version Packages' + title: 'ci: Version Packages' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + - name: Generate Docs + if: steps.changesets.outputs.published == 'true' + run: pnpm generate-docs + - name: Commit Generated Docs + if: steps.changesets.outputs.published == 'true' + run: | + if [ -n "$(git status --porcelain)" ]; then + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + BRANCH="docs/auto-update-$(date +%s)" + git checkout -b "$BRANCH" + git add docs/ + git commit -m "docs: regenerate API documentation" + git push origin "$BRANCH" + + gh pr create \ + --title "docs: regenerate API documentation" \ + --body "Automated documentation update from release" \ + --base main \ + --head "$BRANCH" + else + echo "No changes in generated docs" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Comment on PRs about release if: steps.changesets.outputs.published == 'true' uses: tanstack/config/.github/comment-on-release@main diff --git a/.github/workflows/reproduce-and-fix-issue-claude.yml b/.github/workflows/reproduce-and-fix-issue-claude.yml new file mode 100644 index 000000000..b4d810756 --- /dev/null +++ b/.github/workflows/reproduce-and-fix-issue-claude.yml @@ -0,0 +1,147 @@ +name: Claude Auto Issue Fix + +on: + issue_comment: + types: [created] + +jobs: + claude_auto_issue_fix: + # Only run when a maintainer comments with /reproduce on an issue + # Exclude Claude's own comments to prevent 
recursive triggers + if: | + github.event.issue != null && + github.actor != 'claude[bot]' && + contains(github.event.comment.body, '/reproduce') && + (github.event.comment.user.login == 'kevin-dp' || github.event.comment.user.login == 'KyleAMathews' || github.event.comment.user.login == 'samwillis') + runs-on: ubuntu-latest + # Prevent multiple runs for the same issue + concurrency: + group: claude-auto-issue-${{ github.repository }}-${{ github.event.issue.number }} + cancel-in-progress: false + + permissions: + contents: write + pull-requests: write + issues: write + id-token: write + actions: read + + steps: + - name: Checkout code + uses: actions/checkout@v6.0.1 + with: + fetch-depth: 0 + + - name: Run Claude Code (auto issue handler) + uses: anthropics/claude-code-action@v1 + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + claude_args: | + --model claude-opus-4-5 + --allowedTools Bash,Edit,Read,Write + prompt: | + You are an autonomous coding agent running in CI for this repository. + This workflow runs when a maintainer requests issue analysis by commenting "/reproduce" on an issue. + + === Context === + REPO: ${{ github.repository }} + ISSUE NUMBER: ${{ github.event.issue.number }} + ISSUE TITLE: ${{ github.event.issue.title }} + ISSUE URL: ${{ github.event.issue.html_url }} + + ISSUE BODY: + ${{ github.event.issue.body }} + + === Goal === + Apply a strict “repro test → fix” methodology. Produce exactly ONE of these outcomes: + 1) Repro PR + Fix PR (preferred for actionable bugs) + 2) Repro PR only (if reproducible but cannot fix) + 3) Comment-only “Needs info / Not actionable / Question answer draft” + No other outcomes. + + === Hard rules (non-negotiable) === + - Never claim reproduction unless a test fails due to a concrete behavioral assertion. + - Never use "does not throw / does not crash" as the primary assertion unless: + (a) the issue explicitly says throwing is the bug, AND + (b) the correct behavior is explicitly "should not throw". 
+ - Never "fix" by skipping, swallowing, or ignoring the problematic case + (e.g. guard clauses / try-catch that hides the error) unless the issue explicitly says that is correct. + Any conditional handling must still implement the intended behavior. + - Do not weaken tests to make them pass. Do not remove assertions. Do not skip tests. Do not mark flaky. + - Keep changes minimal and scoped. No refactors. No drive-by formatting. + - If expected behavior is unclear or not testable, STOP and request the minimum missing info. + - NEVER add issue references (e.g. "issue #1152", "#1152", "Issue 1152") in the codebase itself. + This includes test names, code comments, variable names, or any other code artifacts. + Issue references are ONLY allowed in: branch names, commit messages, PR titles, PR bodies, and GitHub comments. + Test names and code comments should be purely descriptive of current behavior, no description of old behavior and do not reference where the bug was reported. + + === Step 0: Determine actionability === + From the issue, extract: + - Expected behavior (must be explicit & testable) + - Actual behavior + - Repro steps / minimal example + If you cannot extract a testable expected behavior: + - Post a GitHub comment requesting the minimum missing details (inputs, expected outputs, versions, etc.) + - Stop (no PRs) + + === Step 1: Reproduction test (behavioral oracle) === + Create a minimal unit/integration test that asserts the expected behavior: + - Test name must describe the intended behavior only (e.g. "updates cache entry when using computed query key"). + Do NOT include issue numbers or references in test names (BAD: "fixes issue #1152", "issue 1152 regression"). + - Assert concrete outcomes: returned values, state transitions, emitted events, persisted data, etc. + - Add at least one "anti-noop" assertion that would fail if the code simply "does nothing" + (e.g. verify a state change, returned value, side effect). 
+ - If the issue involves an error, prefer asserting correct error type/message or correct recovery/result. + - Do NOT add comments in the test code referencing the issue number. + + === Step 2: Prove it reproduces on base === + Run tests on base: + - Confirm the new test FAILS due to assertion mismatch (not due to broken test setup). + If the test passes on base: + - Do NOT fake a repro by weakening assertions. + - If the issue seems intermittent, attempt to make repro deterministic; otherwise comment “not reproducible” and stop. + + Commit repro and open PR: + - Branch: ai/issue-${{ github.event.issue.number }}-repro + - Commit message: test: assert (issue #${{ github.event.issue.number }}) + - PR title: [repro] (issue #${{ github.event.issue.number }}) + - PR body: link issue + what test asserts + how to run + observed failure + + === Step 3: Fix (stacked on repro) === + Create fix branch from repro branch: + - Branch: ai/issue-${{ github.event.issue.number }}-fix + Implement the minimal fix. + Validation: + - Previously failing test MUST now pass. + - Fix must not be a no-op. + - If you add guards/conditionals, justify why it's correct behavior (not hiding bug), + and ensure intended work still occurs. + - Code comments should explain the "why" of the fix, NOT reference the issue number. + BAD: "Issue #1152: Fixed cache lookup" + GOOD: "Use the actual query key for cache lookup, not just the base key" + + Commit fix and open stacked PR: + - Commit message: fix: (issue #${{ github.event.issue.number }}) + - PR title: [fix] (issue #${{ github.event.issue.number }}) + - PR body: link issue + link repro PR + root cause + fix explanation + how to test + + === If stuck === + If you cannot fix without derailing: + - Still open the repro PR if it's valid (high value). + - Post a comment summarizing findings and blockers. + - Do NOT force a “fix” that silences symptoms. 
+ + === Quality checklist (must satisfy before marking fixed) === + - [ ] Expected behavior asserted (not "no throw" unless truly correct) + - [ ] Test fails on base for the right reason + - [ ] Fix makes test pass without weakening assertions + - [ ] Fix is not "skip/swallow/ignore" + - [ ] Changes minimal and scoped + + === Final comment === + After completing your work, always post a comment on the original issue summarizing what you did: + - If you opened PRs: link to the repro PR and fix PR (if created) + - Briefly describe what the test asserts and what the fix does + - If you couldn't complete the task: explain what you found and what's blocking progress + + Proceed now. diff --git a/.github/workflows/review-pr-claude.yml b/.github/workflows/review-pr-claude.yml new file mode 100644 index 000000000..e40e97580 --- /dev/null +++ b/.github/workflows/review-pr-claude.yml @@ -0,0 +1,80 @@ +name: Claude PR Review + +on: + issue_comment: + types: [created] + +jobs: + claude_pr_review: + # Only run when a maintainer comments with /review on a PR + # Exclude Claude's own comments to prevent recursive triggers + if: | + github.event.issue.pull_request != null && + github.actor != 'claude[bot]' && + contains(github.event.comment.body, '/review') && + (github.event.comment.user.login == 'kevin-dp' || github.event.comment.user.login == 'KyleAMathews' || github.event.comment.user.login == 'samwillis') + runs-on: ubuntu-latest + # Prevent multiple concurrent reviews for the same PR + concurrency: + group: claude-pr-review-${{ github.repository }}-${{ github.event.issue.number }} + cancel-in-progress: false + + permissions: + contents: read + pull-requests: write + issues: write + id-token: write + actions: read + + steps: + - name: Checkout code + uses: actions/checkout@v6.0.1 + with: + fetch-depth: 0 + + - name: Get PR branch + id: pr-info + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + PR_DATA=$(gh pr view ${{ github.event.issue.number }} --json 
headRefName,baseRefName) + echo "head_ref=$(echo $PR_DATA | jq -r '.headRefName')" >> $GITHUB_OUTPUT + echo "base_ref=$(echo $PR_DATA | jq -r '.baseRefName')" >> $GITHUB_OUTPUT + + - name: Checkout PR branch + run: git checkout ${{ steps.pr-info.outputs.head_ref }} + + - name: Run Claude Code (PR review) + uses: anthropics/claude-code-action@v1 + with: + anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + plugin_marketplaces: 'https://github.com/anthropics/claude-code.git' + plugins: 'pr-review-toolkit' + claude_args: | + --model claude-opus-4-5 + --allowedTools Bash,Read,Glob,Grep,Task,Skill + prompt: | + You are an autonomous PR review agent running in CI for this repository. + + === Context === + REPO: ${{ github.repository }} + PR NUMBER: ${{ github.event.issue.number }} + PR TITLE: ${{ github.event.issue.title }} + PR URL: ${{ github.event.issue.html_url }} + BASE BRANCH: ${{ steps.pr-info.outputs.base_ref }} + HEAD BRANCH: ${{ steps.pr-info.outputs.head_ref }} + + COMMENT (may specify review aspects like "/review tests" or "/review errors code"): + ${{ github.event.comment.body }} + + === Task === + Run the /pr-review-toolkit:review-pr skill to perform a comprehensive multi-agent code review. + + Parse any review aspects from the comment (e.g., "tests", "errors", "code", "types", "comments", "simplify", "all"). + Pass these as arguments to the skill. Default to "all" if none specified. + + After the review completes, post the findings as a comment on PR #${{ github.event.issue.number }} using `gh pr comment`. + + This is a read-only review. Do NOT make any edits or commits. + + Proceed now. 
diff --git a/.gitignore b/.gitignore index 528e53f21..1cbcf6ed2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,153 +1,45 @@ -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -lerna-debug.log* -.pnpm-debug.log* - -# Diagnostic reports (https://nodejs.org/api/report.html) -report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json -# Runtime data -pids -*.pid -*.seed -*.pid.lock +# See https://help.github.com/ignore-files/ for more about ignoring files. -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov +# dependencies +node_modules +package-lock.json +yarn.lock -# Coverage directory used by tools like istanbul +# builds +build coverage -*.lcov - -# nyc test coverage -.nyc_output - -# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# Bower dependency directory (https://bower.io/) -bower_components - -# node-waf configuration -.lock-wscript - -# Compiled binary addons (https://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -node_modules/ -jspm_packages/ - -# Snowpack dependency directory (https://snowpack.dev/) -web_modules/ - -# TypeScript cache -*.tsbuildinfo - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache - -# Optional stylelint cache -.stylelintcache - -# Microbundle cache -.rpt2_cache/ -.rts2_cache_cjs/ -.rts2_cache_es/ -.rts2_cache_umd/ - -# Optional REPL history -.node_repl_history - -# Output of 'npm pack' -*.tgz - -# Yarn Integrity file -.yarn-integrity +dist -# dotenv environment variable files +# misc +.DS_Store .env +.env.local .env.development.local .env.test.local .env.production.local -.env.local - -# parcel-bundler cache (https://parceljs.org/) -.cache -.parcel-cache - -# Next.js build output .next -out -# Nuxt.js build / generate output -.nuxt -dist - -# Gatsby files -.cache/ -# Comment in the public line in if your project uses Gatsby and not Next.js -# https://nextjs.org/blog/next-9-1#public-directory-support -# public 
- -# vuepress build output -.vuepress/dist +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.history +size-plugin.json +stats-hydration.json +stats.json +stats.html +.vscode/settings.json -# vuepress v2.x temp and cache directory -.temp +*.log .cache - -# Docusaurus cache and generated files -.docusaurus - -# Serverless directories -.serverless/ - -# FuseBox cache -.fusebox/ - -# DynamoDB Local files -.dynamodb/ - -# TernJS port file -.tern-port - -# Stores VSCode versions used for testing VSCode extensions -.vscode-test - -# yarn v2 -.yarn/cache -.yarn/unplugged -.yarn/build-state.yml -.yarn/install-state.gz -.pnp.* -.DS_Store - -# Added by Task Master AI -dev-debug.log -# Environment variables -# Editor directories and files .idea -.vscode -*.suo -*.ntvs* -*.njsproj -*.sln -*.sw? -# OS specific -# Task files -tasks.json -tasks/ - -## Tanstack Start -.nitro -.output -.tanstack -.claude +.nx/cache +.nx/workspace-data +.pnpm-store +.tsup +.svelte-kit +.expo + +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +tsconfig.vitest-temp.json diff --git a/.husky/pre-commit b/.husky/pre-commit index a5a29d9f7..cb2c84d5c 100755 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,4 +1 @@ -#!/usr/bin/env sh -. "$(dirname -- "$0")/_/husky.sh" - pnpm lint-staged diff --git a/.prettierrc b/.prettierrc deleted file mode 100644 index eaff0359c..000000000 --- a/.prettierrc +++ /dev/null @@ -1,5 +0,0 @@ -{ - "trailingComma": "es5", - "semi": false, - "tabWidth": 2 -} diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 000000000..0bfe39ff1 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,2 @@ +pnpm 10.26.0 +nodejs 22.13.1 diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..6654f89bf --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,647 @@ +# Agent Coding Guidelines for TanStack DB + +This guide provides principles and patterns for AI agents contributing to the TanStack DB codebase. 
These guidelines are derived from PR review patterns and reflect the quality standards expected in this project. + +## Table of Contents + +1. [Type Safety](#type-safety) +2. [Code Organization](#code-organization) +3. [Algorithm Efficiency](#algorithm-efficiency) +4. [Semantic Correctness](#semantic-correctness) +5. [Abstraction Design](#abstraction-design) +6. [Code Clarity](#code-clarity) +7. [Testing Requirements](#testing-requirements) +8. [Function Design](#function-design) +9. [Modern JavaScript Patterns](#modern-javascript-patterns) +10. [Edge Cases and Corner Cases](#edge-cases-and-corner-cases) + +## Type Safety + +### Avoid `any` Types + +**❌ Bad:** + +```typescript +function processData(data: any) { + return data.value +} + +const result: any = someOperation() +``` + +**✅ Good:** + +```typescript +function processData(data: unknown) { + if (isDataObject(data)) { + return data.value + } + throw new Error('Invalid data') +} + +const result: TQueryData = someOperation() +``` + +**Key Principles:** + +- Use `unknown` instead of `any` when the type is truly unknown +- Provide proper type annotations for return values +- Use type guards to narrow `unknown` types safely +- If you find yourself using `any`, question whether there's a better type + +## Code Organization + +### Extract Common Logic + +**❌ Bad:** + +```typescript +// Duplicated logic in multiple places +function processA() { + const key = typeof value === 'number' ? `__number__${value}` : String(value) + // ... +} + +function processB() { + const key = typeof value === 'number' ? `__number__${value}` : String(value) + // ... +} +``` + +**✅ Good:** + +```typescript +function serializeKey(value: string | number): string { + return typeof value === 'number' ? `__number__${value}` : String(value) +} + +function processA() { + const key = serializeKey(value) + // ... +} + +function processB() { + const key = serializeKey(value) + // ... 
+} +``` + +### Organize Utilities + +**Key Principles:** + +- Extract serialization/deserialization logic into utility files +- When you see identical or near-identical code blocks, extract to a helper function +- Prefer small, focused utility functions over large inline implementations +- Move reusable logic into utility modules (e.g., `utils/`, `helpers/`) + +### Function Size and Complexity + +**❌ Bad:** + +```typescript +function syncData() { + // 200+ lines of logic handling multiple concerns + // - snapshot phase + // - buffering + // - sync state management + // - error handling + // all inline... +} +``` + +**✅ Good:** + +```typescript +function syncData() { + handleSnapshotPhase() + manageBuffering() + updateSyncState() + handleErrors() +} + +function handleSnapshotPhase() { + // Focused logic for snapshot phase +} +``` + +**Key Principle:** If a function is massive, extract logical sections into separate functions. This improves readability and maintainability. + +## Algorithm Efficiency + +### Be Mindful of Time Complexity + +**❌ Bad: O(n²) Queue Processing:** + +```typescript +// Processes elements in queue, but elements may need multiple passes +while (queue.length > 0) { + const job = queue.shift() + if (hasUnmetDependencies(job)) { + queue.push(job) // Re-queue, causing O(n²) behavior + } else { + processJob(job) + } +} +``` + +**✅ Good: Dependency-Aware Processing:** + +```typescript +// Use a data structure that respects dependencies +// Process only jobs with no unmet dependencies +// Consider topological sort for DAG-like structures +const readyJobs = jobs.filter((job) => !hasUnmetDependencies(job)) +readyJobs.forEach(processJob) +``` + +### Use Appropriate Data Structures + +**❌ Bad:** + +```typescript +// O(n) lookup for each check +const items = ['foo', 'bar', 'baz' /* hundreds more */] +if (items.includes(searchValue)) { + // ... 
+} +``` + +**✅ Good:** + +```typescript +// O(1) lookup +const items = new Set(['foo', 'bar', 'baz' /* hundreds more */]) +if (items.has(searchValue)) { + // ... +} +``` + +**Key Principles:** + +- For membership checks on large collections, use `Set` instead of `Array.includes()` +- Be aware of nested loops and their complexity implications +- Consider the worst-case scenario, especially for operations that could process many items +- Use appropriate data structures (Set for lookups, Map for key-value, etc.) + +## Semantic Correctness + +### Ensure Logic Matches Intent + +**❌ Bad:** + +```typescript +// Intending to check if subset limit is more restrictive than superset +function isLimitSubset( + subset: number | undefined, + superset: number | undefined, +) { + return subset === undefined || superset === undefined || subset <= superset +} + +// Problem: If subset has no limit but superset does, returns true (incorrect) +``` + +**✅ Good:** + +```typescript +function isLimitSubset( + subset: number | undefined, + superset: number | undefined, +) { + // Subset with no limit cannot be a subset of one with a limit + return superset === undefined || (subset !== undefined && subset <= superset) +} +``` + +### Validate Intersections and Unions + +When merging predicates or combining queries, ensure the semantics are correct: + +**Example Problem:** + +```sql +-- Query 1: WHERE age >= 18 LIMIT 1 +-- Query 2: WHERE age >= 20 LIMIT 3 +-- Naive intersection: WHERE age >= 20 LIMIT 1 +-- Problem: This may not return the actual intersection of results +``` + +**Key Principle:** Think carefully about what operations like intersection, union, and subset mean for your specific use case. Consider edge cases with limits, ordering, and predicates. 
+ +## Abstraction Design + +### Avoid Leaky Abstractions + +**❌ Bad:** + +```typescript +class Collection { + getViewKey(key: TKey): string { + // Caller needs to know internal representation + return `${this._state.viewKeyPrefix}${key}` + } +} + +// Usage exposes internals +const viewKey = collection.getViewKey(key) +if (viewKey.startsWith(PREFIX)) { + /* ... */ +} +``` + +**✅ Good:** + +```typescript +class Collection { + getViewKey(key: TKey): string { + // Delegate to state manager, hiding implementation + return this._state.getViewKey(key) + } +} + +class CollectionStateManager { + getViewKey(key: TKey): string { + return `${this.viewKeyPrefix}${key}` + } +} +``` + +**Key Principles:** + +- Encapsulate implementation details within the responsible class +- Don't expose internal data structures or representations +- Use delegation to maintain clean boundaries between components +- Keep internal properties private when possible + +### Proper Encapsulation + +**Key Principle:** If you need to access a property or method from outside a class, add a public method that delegates to the internal implementation rather than exposing the internal property directly. + +## Code Clarity + +### Prefer Positive Predicates + +**❌ Bad:** + +```typescript +if (!refs.some((ref) => ref.path[0] === outerAlias)) { + // treat as safe +} +``` + +**✅ Good:** + +```typescript +if (refs.every((ref) => ref.path[0] !== outerAlias)) { + // treat as safe +} +``` + +**Key Principle:** Positive conditions (every, all) are generally easier to understand than negated conditions (not some). 
+ +### Simplify Complex Conditions + +**❌ Bad:** + +```typescript +const isLoadingNow = this.pendingLoadSubsetPromises.size > 0 +if (isLoadingNow && !isLoadingNow) { + // Confusing logic +} +``` + +**✅ Good:** + +```typescript +const wasLoading = this.pendingLoadSubsetPromises.size > 0 +this.pendingLoadSubsetPromises.add(promise) +const isLoadingNow = this.pendingLoadSubsetPromises.size === 1 + +if (isLoadingNow) { + // Started loading +} +``` + +### Use Descriptive Names + +**❌ Bad:** + +```typescript +const viewKeysMap = new Map() // Type in name is redundant +const dependencyBuilders = [] // Sounds like functions that build +``` + +**✅ Good:** + +```typescript +const viewKeys = new Map() // Data structure not in name +const dependentBuilders = [] // Accurately describes dependents +``` + +**Key Principles:** + +- Avoid Hungarian notation (encoding type in variable name) +- Use names that describe the role or purpose, not the data structure +- Choose names that make the code read like prose +- Prefer `dependentBuilders` over `dependencyBuilders` when referring to things that depend on something + +## Testing Requirements + +### Always Add Tests for Bugs + +**Key Principle:** If you're fixing a bug, add a unit test that reproduces the bug before fixing it. 
This ensures: + +- The bug is actually fixed +- The bug doesn't regress in the future +- The fix is validated + +**Example:** + +```typescript +// Found a bug with fetchSnapshot resolving after up-to-date message +// Should add a test: +test('ignores snapshot that resolves after up-to-date message', async () => { + // Reproduce the corner case + // Verify it's handled correctly +}) +``` + +### Test Corner Cases + +Common corner cases to consider: + +- Empty arrays or sets +- Single-element collections +- `undefined` vs `null` values +- Operations on already-resolved promises +- Race conditions between async operations +- Limit/offset edge cases (0, 1, very large numbers) +- IN predicates with 0 or 1 elements + +## Function Design + +### Prefer Explicit Parameters Over Closures + +**❌ Bad:** + +```typescript +function outer() { + const config = getConfig() + const state = getState() + + const updateFn = () => { + // Closes over config and state + applyUpdate(config, state) + } + + scheduler.schedule(updateFn) +} +``` + +**✅ Good:** + +```typescript +function updateEntry(entry: Entry, config: Config, state: State) { + applyUpdate(entry, config, state) +} + +function outer() { + const config = getConfig() + const state = getState() + + scheduler.schedule({ + config, + state, + update: updateEntry, + }) +} +``` + +**Key Principles:** + +- Functions that take dependencies as arguments are easier to test +- Explicit parameters make data flow clearer +- Closures can hide dependencies and make code harder to follow +- Use closures when they genuinely simplify the code, but be intentional + +### Return Type Precision + +**❌ Bad:** + +```typescript +function serializeKey(key: string | number): unknown { + return String(key) +} +``` + +**✅ Good:** + +```typescript +function serializeKey(key: string | number): string { + return String(key) +} +``` + +**Key Principle:** Always provide the most precise return type. Avoid `unknown` or `any` return types unless truly necessary. 
+ +## Modern JavaScript Patterns + +### Use Modern Operators + +**❌ Bad:** + +```typescript +if (firstError === undefined) { + firstError = error +} + +const value = cached !== null && cached !== undefined ? cached : defaultValue + +if (obj[key] === undefined) { + obj[key] = value +} +``` + +**✅ Good:** + +```typescript +firstError ??= error + +const value = cached ?? defaultValue + +obj[key] ??= value +``` + +### Use Spread Operator + +**❌ Bad:** + +```typescript +const combined = [] +for (const item of currentItems) { + combined.push(item) +} +for (const item of newItems) { + combined.push(item) +} +``` + +**✅ Good:** + +```typescript +const combined = [...currentItems, ...newItems] +``` + +### Simplify Array Operations + +**❌ Bad:** + +```typescript +const filtered = [] +for (const item of items) { + if (item.value > 0) { + filtered.push(item) + } +} +``` + +**✅ Good:** + +```typescript +const filtered = items.filter((item) => item.value > 0) +``` + +## Edge Cases and Corner Cases + +### Common Patterns to Consider + +1. **Key Encoding**: When converting keys to strings, ensure no collisions + + ```typescript + // ❌ Bad: numeric 1 and string "__number__1" collide + const key = typeof val === 'number' ? `__number__${val}` : String(val) + + // ✅ Good: proper encoding with type prefix + const key = `${typeof val}_${String(val)}` + ``` + +2. **Subset/Superset Logic**: Consider all cases + + ```typescript + // Consider: IN with 0, 1, or many elements + // Consider: EQ vs IN predicates + // Consider: Range predicates (>=, <=) vs equality + ``` + +3. **Limit and Offset**: Handle undefined, 0, and edge values + + ```typescript + // What happens when limit is 0? + // What happens when offset exceeds data length? + // What happens when limit is undefined? + ``` + +4. **Optional vs Required**: Be explicit about optionality + + ```typescript + // ❌ Why is this optional? 
+ interface Config { + collection?: Collection + } + + // ✅ Document or make required if always needed + interface Config { + collection: Collection // Always required for query collections + } + ``` + +5. **Race Conditions**: Async operations may resolve in unexpected order + ```typescript + // Request snapshot before receiving up-to-date + // But snapshot resolves after up-to-date arrives + // Should ignore the stale snapshot + ``` + +## Package Versioning + +### Understand Semantic Versioning + +**Common Mistake:** + +```json +{ + "dependencies": { + "package": "^0.0.0" + } +} +``` + +**Problem:** `^0.0.0` restricts to exactly `0.0.0`, not "latest 0.0.x" as you might expect. + +From [npm semver docs](https://github.com/npm/node-semver): + +> Caret Ranges allow changes that do not modify the left-most non-zero element. For versions `0.0.X`, this means no updates. + +**Solutions:** + +- Use `*` for any version +- Use `latest` for the latest version +- Use a proper range like `^0.1.0` if that's what you mean + +## Documentation and Comments + +### Keep Useful Comments + +**Good Comment:** + +```typescript +// Returning false signals that callers should schedule another pass +return allDone +``` + +**Good Comment:** + +```typescript +// This step is necessary because the query function has captured +// the old subscription instance in its closure +``` + +### Remove Outdated Comments + +**Key Principle:** When refactoring code, update or remove comments that reference old function names or outdated logic. + +## General Principles + +1. **Question Optionality**: If a property is optional, understand why. Often it should be required. + +2. **Consider Performance**: Before implementing, think about time complexity, especially for operations that might process many items. + +3. **Validate Semantics**: Ensure that your implementation actually does what you think it does. Consider edge cases. + +4. 
**Avoid Premature Complexity**: Don't add ternaries, special cases, or checks for things that can't happen. + +5. **Test First for Bugs**: Reproduce bugs in tests before fixing them. + +6. **Be Consistent**: Follow naming conventions and patterns used elsewhere in the codebase. + +7. **Simplify**: Modern JavaScript provides many concise operators and methods. Use them. + +8. **Encapsulate**: Hide implementation details. Use delegation and proper abstraction boundaries. + +9. **Type Precisely**: Use the most specific type possible. Avoid `any`. + +10. **Extract When Duplicating**: If you're writing the same logic twice, extract it. + +## When in Doubt + +If you're unsure about an implementation decision: + +1. Look for similar patterns in the existing codebase +2. Consider the worst-case scenario for performance +3. Think about edge cases and corner cases +4. Ask: "Does this abstraction leak implementation details?" +5. Ask: "Would this be easy to test?" +6. Ask: "Is this as simple as it could be?" + +Remember: Simple, well-typed, well-tested code with clear abstractions is the goal. We raise the standard of code quality—not through complexity, but through clarity and correctness. diff --git a/README.md b/README.md index 075d8f756..9b12f0958 100644 --- a/README.md +++ b/README.md @@ -37,12 +37,15 @@ > Tanstack DB is currently in BETA. See [the release post](https://tanstack.com/blog/tanstack-db-0.1-the-embedded-client-database-for-tanstack-query) for more details. -A reactive client store that lets you build fast, sync‑driven apps with a backend‑agnostic real‑time data layer: +The reactive client store for your API. 
-- Blazing‑fast query engine for sub‑millisecond live queries, joins & aggregates -- Fine‑grained reactivity to minimize component re‑rendering -- Robust transaction primitives for optimistic mutations with sync & lifecycle support -- Normalized data model that keeps backends simple and consistent +TanStack DB solves the problems of building fast, modern apps, helping you: + +- Avoid endpoint sprawl and network waterfalls by loading data into normalized collections +- Optimise client performance with sub-millisecond live queries and real-time reactivity +- Take the network off the interaction path with instant optimistic writes + +Data loading is optimized. Interactions feel instantaneous. Your backend stays simple and your app stays blazing fast. No matter how much data you load. Read the docs →
diff --git a/docs/collections/local-only-collection.md b/docs/collections/local-only-collection.md new file mode 100644 index 000000000..17bf51ef4 --- /dev/null +++ b/docs/collections/local-only-collection.md @@ -0,0 +1,324 @@ +--- +title: LocalOnly Collection +--- + +# LocalOnly Collection + +LocalOnly collections are designed for in-memory client data or UI state that doesn't need to persist across browser sessions or sync across tabs. + +## Overview + +The `localOnlyCollectionOptions` allows you to create collections that: +- Store data only in memory (no persistence) +- Support optimistic updates with automatic rollback on errors +- Provide optional initial data +- Work perfectly for temporary UI state and session-only data +- Automatically manage the transition from optimistic to confirmed state + +## Installation + +LocalOnly collections are included in the core TanStack DB package: + +```bash +npm install @tanstack/react-db +``` + +## Basic Usage + +```typescript +import { createCollection } from '@tanstack/react-db' +import { localOnlyCollectionOptions } from '@tanstack/react-db' + +const uiStateCollection = createCollection( + localOnlyCollectionOptions({ + id: 'ui-state', + getKey: (item) => item.id, + }) +) +``` + +### Direct Local Mutations + +**Important:** LocalOnly collections work differently than server-synced collections. With LocalOnly collections, you **directly mutate state** by calling methods like `collection.insert()`, `collection.update()`, and `collection.delete()` — that's all you need to do. The changes are immediately applied to your local in-memory data. + +This is different from collections that sync with a server (like Query Collection), where mutation handlers send data to a backend. 
With LocalOnly collections, everything stays local: + +```typescript +// Just call the methods directly - no server sync involved +uiStateCollection.insert({ id: 'theme', mode: 'dark' }) +uiStateCollection.update('theme', (draft) => { draft.mode = 'light' }) +uiStateCollection.delete('theme') +``` + +## Configuration Options + +The `localOnlyCollectionOptions` function accepts the following options: + +### Required Options + +- `id`: Unique identifier for the collection +- `getKey`: Function to extract the unique key from an item + +### Optional Options + +- `schema`: [Standard Schema](https://standardschema.dev) compatible schema (e.g., Zod, Effect) for client-side validation +- `initialData`: Array of items to populate the collection with on creation +- `onInsert`: Optional handler function called before confirming inserts +- `onUpdate`: Optional handler function called before confirming updates +- `onDelete`: Optional handler function called before confirming deletes + +## Initial Data + +Populate the collection with initial data on creation: + +```typescript +const uiStateCollection = createCollection( + localOnlyCollectionOptions({ + id: 'ui-state', + getKey: (item) => item.id, + initialData: [ + { id: 'sidebar', isOpen: false }, + { id: 'theme', mode: 'light' }, + { id: 'modal', visible: false }, + ], + }) +) +``` + +## Mutation Handlers + +Mutation handlers are **completely optional**. 
When provided, they are called before the optimistic state is confirmed: + +```typescript +const tempDataCollection = createCollection( + localOnlyCollectionOptions({ + id: 'temp-data', + getKey: (item) => item.id, + onInsert: async ({ transaction }) => { + // Custom logic before confirming the insert + console.log('Inserting:', transaction.mutations[0].modified) + }, + onUpdate: async ({ transaction }) => { + // Custom logic before confirming the update + const { original, modified } = transaction.mutations[0] + console.log('Updating from', original, 'to', modified) + }, + onDelete: async ({ transaction }) => { + // Custom logic before confirming the delete + console.log('Deleting:', transaction.mutations[0].original) + }, + }) +) +``` + +## Manual Transactions + +When using LocalOnly collections with manual transactions (created via `createTransaction`), you must call `utils.acceptMutations()` to persist the changes: + +```typescript +import { createTransaction } from '@tanstack/react-db' + +const localData = createCollection( + localOnlyCollectionOptions({ + id: 'form-draft', + getKey: (item) => item.id, + }) +) + +const serverCollection = createCollection( + queryCollectionOptions({ + queryKey: ['items'], + queryFn: async () => api.items.getAll(), + getKey: (item) => item.id, + onInsert: async ({ transaction }) => { + await api.items.create(transaction.mutations[0].modified) + }, + }) +) + +const tx = createTransaction({ + mutationFn: async ({ transaction }) => { + // Handle server collection mutations explicitly in mutationFn + await Promise.all( + transaction.mutations + .filter((m) => m.collection === serverCollection) + .map((m) => api.items.create(m.modified)) + ) + + // After server mutations succeed, accept local collection mutations + localData.utils.acceptMutations(transaction) + }, +}) + +// Apply mutations to both collections in one transaction +tx.mutate(() => { + localData.insert({ id: 'draft-1', data: '...' 
}) + serverCollection.insert({ id: '1', name: 'Item' }) +}) + +await tx.commit() +``` + +## Complete Example: Modal State Management + +```typescript +import { createCollection, eq } from '@tanstack/react-db' +import { localOnlyCollectionOptions } from '@tanstack/react-db' +import { useLiveQuery } from '@tanstack/react-db' +import { z } from 'zod' + +// Define schema +const modalStateSchema = z.object({ + id: z.string(), + isOpen: z.boolean(), + data: z.any().optional(), +}) + +type ModalState = z.infer + +// Create collection +export const modalStateCollection = createCollection( + localOnlyCollectionOptions({ + id: 'modal-state', + getKey: (item) => item.id, + schema: modalStateSchema, + initialData: [ + { id: 'user-profile', isOpen: false }, + { id: 'settings', isOpen: false }, + { id: 'confirm-delete', isOpen: false }, + ], + }) +) + +// Use in component +function UserProfileModal() { + const { data: modals } = useLiveQuery((q) => + q.from({ modal: modalStateCollection }) + .where(({ modal }) => eq(modal.id, 'user-profile')) + ) + + const modalState = modals[0] + + const openModal = (data?: any) => { + modalStateCollection.update('user-profile', (draft) => { + draft.isOpen = true + draft.data = data + }) + } + + const closeModal = () => { + modalStateCollection.update('user-profile', (draft) => { + draft.isOpen = false + draft.data = undefined + }) + } + + if (!modalState?.isOpen) return null + + return ( +
+    <div className="modal">
+      <h2>User Profile</h2>
+      <pre>{JSON.stringify(modalState.data, null, 2)}</pre>
+      <button onClick={closeModal}>Close</button>
+    </div>
+ ) +} +``` + +## Complete Example: Form Draft State + +```typescript +import { createCollection, eq } from '@tanstack/react-db' +import { localOnlyCollectionOptions } from '@tanstack/react-db' +import { useLiveQuery } from '@tanstack/react-db' + +type FormDraft = { + id: string + formData: Record + lastModified: Date +} + +// Create collection for form drafts +export const formDraftsCollection = createCollection( + localOnlyCollectionOptions({ + id: 'form-drafts', + getKey: (item) => item.id, + }) +) + +// Use in component +function CreatePostForm() { + const { data: drafts } = useLiveQuery((q) => + q.from({ draft: formDraftsCollection }) + .where(({ draft }) => eq(draft.id, 'new-post')) + ) + + const currentDraft = drafts[0] + + const updateDraft = (field: string, value: any) => { + if (currentDraft) { + formDraftsCollection.update('new-post', (draft) => { + draft.formData[field] = value + draft.lastModified = new Date() + }) + } else { + formDraftsCollection.insert({ + id: 'new-post', + formData: { [field]: value }, + lastModified: new Date(), + }) + } + } + + const clearDraft = () => { + if (currentDraft) { + formDraftsCollection.delete('new-post') + } + } + + const submitForm = async () => { + if (!currentDraft) return + + await api.posts.create(currentDraft.formData) + clearDraft() + } + + return ( +
+    <form onSubmit={(e) => { e.preventDefault(); submitForm() }}>
+      <input
+        onChange={(e) => updateDraft('title', e.target.value)}
+      />
+      <button type="submit">Submit</button>
+      <button type="button" onClick={clearDraft}>Clear Draft</button>
+    </form>
+ ) +} +``` + +## Use Cases + +LocalOnly collections are perfect for: +- Temporary UI state (modals, sidebars, tooltips) +- Form draft data during the current session +- Client-side computed or derived data +- Wizard/multi-step form state +- Temporary filters or search state +- In-memory caches + +## Comparison with LocalStorageCollection + +| Feature | LocalOnly | LocalStorage | +|---------|-----------|--------------| +| Persistence | None (in-memory only) | localStorage | +| Cross-tab sync | No | Yes | +| Survives page reload | No | Yes | +| Performance | Fastest | Fast | +| Size limits | Memory limits | ~5-10MB | +| Best for | Temporary UI state | User preferences | + +## Learn More + +- [Optimistic Mutations](../guides/mutations.md) +- [Live Queries](../guides/live-queries.md) +- [LocalStorage Collection](./local-storage-collection.md) diff --git a/docs/collections/local-storage-collection.md b/docs/collections/local-storage-collection.md new file mode 100644 index 000000000..171e5cb9b --- /dev/null +++ b/docs/collections/local-storage-collection.md @@ -0,0 +1,312 @@ +--- +title: LocalStorage Collection +--- + +# LocalStorage Collection + +LocalStorage collections store small amounts of local-only state that persists across browser sessions and syncs across browser tabs in real-time. 
+ +## Overview + +The `localStorageCollectionOptions` allows you to create collections that: +- Persist data to localStorage (or sessionStorage) +- Automatically sync across browser tabs using storage events +- Support optimistic updates with automatic rollback on errors +- Store all data under a single localStorage key +- Work with any storage API that matches the localStorage interface + +## Installation + +LocalStorage collections are included in the core TanStack DB package: + +```bash +npm install @tanstack/react-db +``` + +## Basic Usage + +```typescript +import { createCollection } from '@tanstack/react-db' +import { localStorageCollectionOptions } from '@tanstack/react-db' + +const userPreferencesCollection = createCollection( + localStorageCollectionOptions({ + id: 'user-preferences', + storageKey: 'app-user-prefs', + getKey: (item) => item.id, + }) +) +``` + +### Direct Local Mutations + +**Important:** LocalStorage collections work differently than server-synced collections. With LocalStorage collections, you **directly mutate state** by calling methods like `collection.insert()`, `collection.update()`, and `collection.delete()` — that's all you need to do. The changes are immediately applied to your local data and automatically persisted to localStorage. + +This is different from collections that sync with a server (like Query Collection), where mutation handlers send data to a backend. 
With LocalStorage collections, everything stays local: + +```typescript +// Just call the methods directly - automatically persisted to localStorage +userPreferencesCollection.insert({ id: 'theme', mode: 'dark' }) +userPreferencesCollection.update('theme', (draft) => { draft.mode = 'light' }) +userPreferencesCollection.delete('theme') +``` + +## Configuration Options + +The `localStorageCollectionOptions` function accepts the following options: + +### Required Options + +- `id`: Unique identifier for the collection +- `storageKey`: The localStorage key where all collection data is stored +- `getKey`: Function to extract the unique key from an item + +### Optional Options + +- `schema`: [Standard Schema](https://standardschema.dev) compatible schema (e.g., Zod, Effect) for client-side validation +- `storage`: Custom storage implementation (defaults to `localStorage`). Can be `sessionStorage` or any object with the localStorage API +- `storageEventApi`: Event API for subscribing to storage events (defaults to `window`). 
Enables custom cross-tab, cross-window, or cross-process synchronization +- `onInsert`: Optional handler function called when items are inserted +- `onUpdate`: Optional handler function called when items are updated +- `onDelete`: Optional handler function called when items are deleted + +## Cross-Tab Synchronization + +LocalStorage collections automatically sync across browser tabs in real-time: + +```typescript +const settingsCollection = createCollection( + localStorageCollectionOptions({ + id: 'settings', + storageKey: 'app-settings', + getKey: (item) => item.id, + }) +) + +// Changes in one tab are automatically reflected in all other tabs +// This works automatically via storage events +``` + +## Using SessionStorage + +You can use `sessionStorage` instead of `localStorage` for session-only persistence: + +```typescript +const sessionCollection = createCollection( + localStorageCollectionOptions({ + id: 'session-data', + storageKey: 'session-key', + storage: sessionStorage, // Use sessionStorage instead + getKey: (item) => item.id, + }) +) +``` + +## Custom Storage Backend + +Provide any storage implementation that matches the localStorage API: + +```typescript +// Example: Custom storage wrapper with encryption +const encryptedStorage = { + getItem(key: string) { + const encrypted = localStorage.getItem(key) + return encrypted ? decrypt(encrypted) : null + }, + setItem(key: string, value: string) { + localStorage.setItem(key, encrypt(value)) + }, + removeItem(key: string) { + localStorage.removeItem(key) + }, +} + +const secureCollection = createCollection( + localStorageCollectionOptions({ + id: 'secure-data', + storageKey: 'encrypted-key', + storage: encryptedStorage, + getKey: (item) => item.id, + }) +) +``` + +### Cross-Tab Sync with Custom Storage + +The `storageEventApi` option (defaults to `window`) allows the collection to subscribe to storage events for cross-tab synchronization. 
A custom storage implementation can provide this API to enable custom cross-tab, cross-window, or cross-process sync: + +```typescript +// Example: Custom storage event API for cross-process sync +const customStorageEventApi = { + addEventListener(event: string, handler: (e: StorageEvent) => void) { + // Custom event subscription logic + // Could be IPC, WebSocket, or any other mechanism + myCustomEventBus.on('storage-change', handler) + }, + removeEventListener(event: string, handler: (e: StorageEvent) => void) { + myCustomEventBus.off('storage-change', handler) + }, +} + +const syncedCollection = createCollection( + localStorageCollectionOptions({ + id: 'synced-data', + storageKey: 'data-key', + storage: customStorage, + storageEventApi: customStorageEventApi, // Custom event API + getKey: (item) => item.id, + }) +) +``` + +This enables synchronization across different contexts beyond just browser tabs, such as: +- Cross-process communication in Electron apps +- WebSocket-based sync across multiple browser windows +- Custom IPC mechanisms in desktop applications + +## Mutation Handlers + +Mutation handlers are **completely optional**. 
Data will persist to localStorage whether or not you provide handlers: + +```typescript +const preferencesCollection = createCollection( + localStorageCollectionOptions({ + id: 'preferences', + storageKey: 'user-prefs', + getKey: (item) => item.id, + // Optional: Add custom logic when preferences are updated + onUpdate: async ({ transaction }) => { + const { modified } = transaction.mutations[0] + console.log('Preference updated:', modified) + // Maybe send analytics or trigger other side effects + }, + }) +) +``` + +## Manual Transactions + +When using LocalStorage collections with manual transactions (created via `createTransaction`), you must call `utils.acceptMutations()` to persist the changes: + +```typescript +import { createTransaction } from '@tanstack/react-db' + +const localData = createCollection( + localStorageCollectionOptions({ + id: 'form-draft', + storageKey: 'draft-data', + getKey: (item) => item.id, + }) +) + +const serverCollection = createCollection( + queryCollectionOptions({ + queryKey: ['items'], + queryFn: async () => api.items.getAll(), + getKey: (item) => item.id, + onInsert: async ({ transaction }) => { + await api.items.create(transaction.mutations[0].modified) + }, + }) +) + +const tx = createTransaction({ + mutationFn: async ({ transaction }) => { + // Handle server collection mutations explicitly in mutationFn + await Promise.all( + transaction.mutations + .filter((m) => m.collection === serverCollection) + .map((m) => api.items.create(m.modified)) + ) + + // After server mutations succeed, persist local collection mutations + localData.utils.acceptMutations(transaction) + }, +}) + +// Apply mutations to both collections in one transaction +tx.mutate(() => { + localData.insert({ id: 'draft-1', data: '...' 
}) + serverCollection.insert({ id: '1', name: 'Item' }) +}) + +await tx.commit() +``` + +## Complete Example + +```typescript +import { createCollection, eq } from '@tanstack/react-db' +import { localStorageCollectionOptions } from '@tanstack/react-db' +import { useLiveQuery } from '@tanstack/react-db' +import { z } from 'zod' + +// Define schema +const userPrefsSchema = z.object({ + id: z.string(), + theme: z.enum(['light', 'dark', 'auto']), + language: z.string(), + notifications: z.boolean(), +}) + +type UserPrefs = z.infer + +// Create collection +export const userPreferencesCollection = createCollection( + localStorageCollectionOptions({ + id: 'user-preferences', + storageKey: 'app-user-prefs', + getKey: (item) => item.id, + schema: userPrefsSchema, + }) +) + +// Use in component +function SettingsPanel() { + const { data: prefs } = useLiveQuery((q) => + q.from({ pref: userPreferencesCollection }) + .where(({ pref }) => eq(pref.id, 'current-user')) + ) + + const currentPrefs = prefs[0] + + const updateTheme = (theme: 'light' | 'dark' | 'auto') => { + if (currentPrefs) { + userPreferencesCollection.update(currentPrefs.id, (draft) => { + draft.theme = theme + }) + } else { + userPreferencesCollection.insert({ + id: 'current-user', + theme, + language: 'en', + notifications: true, + }) + } + } + + return ( +
+    <div>
+      <p>Theme: {currentPrefs?.theme}</p>
+      <button onClick={() => updateTheme('light')}>Light</button>
+      <button onClick={() => updateTheme('dark')}>Dark</button>
+      <button onClick={() => updateTheme('auto')}>Auto</button>
+    </div>
+ ) +} +``` + +## Use Cases + +LocalStorage collections are perfect for: +- User preferences and settings +- UI state that should persist across sessions +- Form drafts +- Recently viewed items +- User-specific configurations +- Small amounts of cached data + +## Learn More + +- [Optimistic Mutations](../guides/mutations.md) +- [Live Queries](../guides/live-queries.md) +- [LocalOnly Collection](./local-only-collection.md) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md new file mode 100644 index 000000000..36c2d8be2 --- /dev/null +++ b/docs/collections/powersync-collection.md @@ -0,0 +1,578 @@ +--- +title: PowerSync Collection +--- + +# PowerSync Collection + +PowerSync collections provide seamless integration between TanStack DB and [PowerSync](https://powersync.com), enabling automatic synchronization between your in-memory TanStack DB collections and PowerSync's SQLite database. This gives you offline-ready persistence, real-time sync capabilities, and powerful conflict resolution. + +## Overview + +The `@tanstack/powersync-db-collection` package allows you to create collections that: + +- Automatically mirror the state of an underlying PowerSync SQLite database +- Reactively update when PowerSync records change +- Support optimistic mutations with rollback on error +- Provide persistence handlers to keep PowerSync in sync with TanStack DB transactions +- Use PowerSync's efficient SQLite-based storage engine +- Work with PowerSync's real-time sync features for offline-first scenarios +- Leverage PowerSync's built-in conflict resolution and data consistency guarantees +- Enable real-time synchronization with PostgreSQL, MongoDB and MySQL backends + +## 1. Installation + +Install the PowerSync collection package along with your preferred framework integration. +PowerSync currently works with Web, React Native and Node.js. The examples below use the Web SDK. 
+See the PowerSync quickstart [docs](https://docs.powersync.com/installation/quickstart-guide) for more details. + +```bash +npm install @tanstack/powersync-db-collection @powersync/web @journeyapps/wa-sqlite +``` + +### 2. Create a PowerSync Database and Schema + +```ts +import { Schema, Table, column } from "@powersync/web" + +// Define your schema +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + author: column.text, + created_at: column.text, + archived: column.integer, + }), +}) + +// Initialize PowerSync database +const db = new PowerSyncDatabase({ + database: { + dbFilename: "app.sqlite", + }, + schema: APP_SCHEMA, +}) +``` + +### 3. (optional) Configure Sync with a Backend + +```ts +import { + AbstractPowerSyncDatabase, + PowerSyncBackendConnector, + PowerSyncCredentials, +} from "@powersync/web" + +// TODO implement your logic here +class Connector implements PowerSyncBackendConnector { + fetchCredentials: () => Promise + + /** Upload local changes to the app backend. + * + * Use {@link AbstractPowerSyncDatabase.getCrudBatch} to get a batch of changes to upload. + * + * Any thrown errors will result in a retry after the configured wait period (default: 5 seconds). + */ + uploadData: (database: AbstractPowerSyncDatabase) => Promise +} + +// Configure the client to connect to a PowerSync service and your backend +db.connect(new Connector()) +``` + +### 4. Create a TanStack DB Collection + +There are two main ways to create a collection: using type inference or using schema validation. Type inference will infer collection types from the underlying PowerSync SQLite tables. Schema validation can be used for additional input/output validations and type transforms. + +#### Option 1: Using Table Type Inference + +The collection types are automatically inferred from the PowerSync schema table definition. The table is used to construct a default standard schema validator which is used internally to validate collection operations. 
+ +Collection mutations accept SQLite types and queries report data with SQLite types. + +```ts +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + }) +) + +/** Note: The types for input and output are defined as this */ +// Used for mutations like `insert` or `update` +type DocumentCollectionInput = { + id: string + name: string | null + author: string | null + created_at: string | null // SQLite TEXT + archived: number | null // SQLite integer +} +// The type of query/data results +type DocumentCollectionOutput = DocumentCollectionInput +``` + +The standard PowerSync SQLite types map to these TypeScript types: + +| PowerSync Column Type | TypeScript Type | Description | +| --------------------- | ---------------- | -------------------------------------------------------------------- | +| `column.text` | `string \| null` | Text values, commonly used for strings, JSON, dates (as ISO strings) | +| `column.integer` | `number \| null` | Integer values, also used for booleans (0/1) | +| `column.real` | `number \| null` | Floating point numbers | + +Note: All PowerSync column types are nullable by default. + +#### Option 2: SQLite Types with Schema Validation + +Additional validations for collection mutations can be performed with a custom schema. The Schema below asserts that +the `name`, `author` and `created_at` fields are required as input. `name` also has an additional string length check. + +Note: The input and output types specified in this example still satisfy the underlying SQLite types. An additional `deserializationSchema` is required if the typing differs. See the examples below for more details. + +The application logic (including the backend) should enforce that all incoming synced data passes validation with the `schema`. 
Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. + +```ts +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" +import { z } from "zod" + +// Schema validates SQLite types but adds constraints +const schema = z.object({ + id: z.string(), + name: z.string().min(3, { message: "Should be at least 3 characters" }), + author: z.string(), + created_at: z.string(), // SQLite TEXT for dates + archived: z.number(), +}) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + onDeserializationError: (error) => { + // Present fatal error + }, + }) +) + +/** Note: The types for input and output are defined as this */ +// Used for mutations like `insert` or `update` +type DocumentCollectionInput = { + id: string + name: string + author: string + created_at: string // SQLite TEXT + archived: number // SQLite integer +} +// The type of query/data results +type DocumentCollectionOutput = DocumentCollectionInput +``` + +#### Option 3: Transform SQLite Input Types to Rich Output Types + +You can transform SQLite types to richer types (like Date objects) while keeping SQLite-compatible input types: + +Note: The Transformed types are provided by TanStackDB to the PowerSync SQLite persister. These types need to be serialized in +order to be persisted to SQLite. Most types are converted by default. For custom types, override the serialization by providing a +`serializer` param. + +The example below uses `nullable` columns, this is not a requirement. + +The application logic (including the backend) should enforce that all incoming synced data passes validation with the `schema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! 
An `onDeserializationError` handler must be provided to react to this case.
+
+```ts
+const schema = z.object({
+  id: z.string(),
+  name: z.string().nullable(),
+  author: z.string().nullable(),
+  created_at: z
+    .string()
+    .nullable()
+    .transform((val) => (val ? new Date(val) : null)), // Transform SQLite TEXT to Date
+  archived: z
+    .number()
+    .nullable()
+    .transform((val) => (val != null ? val > 0 : null)), // Transform SQLite INTEGER to boolean
+})
+
+const documentsCollection = createCollection(
+  powerSyncCollectionOptions({
+    database: db,
+    table: APP_SCHEMA.props.documents,
+    schema,
+    onDeserializationError: (error) => {
+      // Present fatal error
+    },
+    // Optional: custom column serialization
+    serializer: {
+      // Dates are serialized by default, this is just an example
+      created_at: (value) => (value ? value.toISOString() : null),
+    },
+  })
+)
+
+/** Note: The types for input and output are defined as this */
+// Used for mutations like `insert` or `update`
+type DocumentCollectionInput = {
+  id: string
+  name: string | null
+  author: string | null
+  created_at: string | null // SQLite TEXT
+  archived: number | null
+}
+// The type of query/data results
+type DocumentCollectionOutput = {
+  id: string
+  name: string | null
+  author: string | null
+  created_at: Date | null // JS Date instance
+  archived: boolean | null // JS boolean
+}
+```
+
+#### Option 4: Custom Input/Output Types with Deserialization
+
+The input and output types can be completely decoupled from the internal SQLite types. This can be used to accept rich values for input mutations.
+We require an additional `deserializationSchema` in order to validate and transform incoming synced (SQLite) updates. This schema should convert the incoming SQLite update to the output type.
+
+The application logic (including the backend) should enforce that all incoming synced data passes validation with the `deserializationSchema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error!
An `onDeserializationError` handler must be provided to react to this case.
+
+```ts
+// Our input/output types use Date and boolean
+const schema = z.object({
+  id: z.string(),
+  name: z.string(),
+  author: z.string(),
+  created_at: z.date(), // Accept Date objects as input
+  archived: z.boolean(), // Accept Booleans as input
+})
+
+// Schema to transform from SQLite types to our output types
+const deserializationSchema = z.object({
+  id: z.string(),
+  name: z.string(),
+  author: z.string(),
+  created_at: z
+    .string()
+    .transform((val) => (new Date(val))), // SQLite TEXT to Date
+  archived: z
+    .number()
+    .transform((val) => (val > 0)), // SQLite INTEGER to Boolean
+})
+
+const documentsCollection = createCollection(
+  powerSyncCollectionOptions({
+    database: db,
+    table: APP_SCHEMA.props.documents,
+    schema,
+    deserializationSchema,
+    onDeserializationError: (error) => {
+      // Present fatal error
+    },
+  })
+)
+
+/** Note: The types for input and output are defined as this */
+// Used for mutations like `insert` or `update`
+type DocumentCollectionInput = {
+  id: string
+  name: string
+  author: string
+  created_at: Date
+  archived: boolean
+}
+// The type of query/data results
+type DocumentCollectionOutput = DocumentCollectionInput
+```
+
+## Features
+
+### Offline-First
+
+PowerSync collections are offline-first by default. All data is stored locally in a SQLite database, allowing your app to work without an internet connection. Changes are automatically synced when connectivity is restored.
+
+### Real-Time Sync
+
+When connected to a PowerSync backend, changes are automatically synchronized in real-time across all connected clients.
The sync process handles: + +- Bi-directional sync with the server +- Conflict resolution +- Queue management for offline changes +- Automatic retries on connection loss + +### Working with Rich JavaScript Types + +PowerSync collections support rich JavaScript types like `Date`, `Boolean`, and custom objects while maintaining SQLite compatibility. The collection handles serialization and deserialization automatically: + +```typescript +import { z } from "zod" +import { Schema, Table, column } from "@powersync/web" +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" + +// Define PowerSync SQLite schema +const APP_SCHEMA = new Schema({ + tasks: new Table({ + title: column.text, + due_date: column.text, // Stored as ISO string in SQLite + completed: column.integer, // Stored as 0/1 in SQLite + metadata: column.text, // Stored as JSON string in SQLite + }), +}) + +// Define rich types schema +const taskSchema = z.object({ + id: z.string(), + title: z.string().nullable(), + due_date: z + .string() + .nullable() + .transform((val) => (val ? new Date(val) : null)), // Convert to Date + completed: z + .number() + .nullable() + .transform((val) => (val != null ? val > 0 : null)), // Convert to boolean + metadata: z + .string() + .nullable() + .transform((val) => (val ? 
JSON.parse(val) : null)), // Parse JSON +}) + +// Create collection with rich types +const tasksCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.tasks, + schema: taskSchema, + }) +) + +// Work with rich types in your code +await tasksCollection.insert({ + id: crypto.randomUUID(), + title: "Review PR", + due_date: "2025-10-30T10:00:00Z", // String input is automatically converted to Date + completed: 0, // Number input is automatically converted to boolean + metadata: JSON.stringify({ priority: "high" }), +}) + +// Query returns rich types +const task = tasksCollection.get("task-1") +console.log(task.due_date instanceof Date) // true +console.log(typeof task.completed) // "boolean" +console.log(task.metadata.priority) // "high" +``` + +### Type Safety with Rich Types + +The collection maintains type safety throughout: + +```typescript +type TaskInput = { + id: string + title: string | null + due_date: string | null // Accept ISO string for mutations + completed: number | null // Accept 0/1 for mutations + metadata: string | null // Accept JSON string for mutations +} + +type TaskOutput = { + id: string + title: string | null + due_date: Date | null // Get Date object in queries + completed: boolean | null // Get boolean in queries + metadata: { + priority: string + [key: string]: any + } | null +} + +// TypeScript enforces correct types: +tasksCollection.insert({ + due_date: new Date(), // Error: Type 'Date' is not assignable to type 'string' +}) + +const task = tasksCollection.get("task-1") +task.due_date.getTime() // OK - TypeScript knows this is a Date +``` + +### Optimistic Updates + +Updates to the collection are applied optimistically to the local state first, then synchronized with PowerSync and the backend. If an error occurs during sync, the changes are automatically rolled back. 
+ +### Metadata Tracking + +Metadata tracking allows attaching custom metadata to collection operations (insert, update, delete). This metadata is persisted alongside the operation and available in PowerSync `CrudEntry` records during upload processing. This is useful for passing additional context about mutations to the backend, such as audit information, operation sources, or custom processing hints. + +#### Enabling Metadata Tracking + +Metadata tracking must be enabled on the PowerSync table: + +```typescript +const APP_SCHEMA = new Schema({ + documents: new Table( + { + name: column.text, + author: column.text, + }, + { + // Enable metadata tracking on this table + trackMetadata: true, + } + ), +}) +``` + +#### Using Metadata in Operations + +Once enabled, metadata can be passed to any collection operation: + +```typescript +const documents = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + }) +) + +// Insert with metadata +await documents.insert( + { + id: crypto.randomUUID(), + name: "Report Q4", + author: "Jane Smith", + }, + { + metadata: { + source: "web-app", + userId: "user-123", + timestamp: Date.now(), + }, + } +).isPersisted.promise + +// Update with metadata +await documents.update( + docId, + { metadata: { reason: "typo-fix", editor: "user-456" } }, + (doc) => { + doc.name = "Report Q4 (Updated)" + } +).isPersisted.promise + +// Delete with metadata +await documents.delete(docId, { + metadata: { deletedBy: "user-789", reason: "duplicate" }, +}).isPersisted.promise +``` + +#### Accessing Metadata During Upload + +The metadata is available in PowerSync `CrudEntry` records when processing uploads in the connector: + +```typescript +import { CrudEntry } from "@powersync/web" + +class Connector implements PowerSyncBackendConnector { + // ... 
+ + async uploadData(database: AbstractPowerSyncDatabase) { + const batch = await database.getCrudBatch() + if (!batch) return + + for (const entry of batch.crud) { + console.log("Operation:", entry.op) // PUT, PATCH, DELETE + console.log("Table:", entry.table) + console.log("Data:", entry.opData) + console.log("Metadata:", entry.metadata) // Custom metadata (stringified) + + // Parse metadata if needed + if (entry.metadata) { + const meta = JSON.parse(entry.metadata) + console.log("Source:", meta.source) + console.log("User ID:", meta.userId) + } + + // Process the operation with the backend... + } + + await batch.complete() + } +} +``` + +**Note**: If metadata is provided to an operation but the table doesn't have `trackMetadata: true`, a warning will be logged and the metadata will be ignored. + +## Configuration Options + +The `powerSyncCollectionOptions` function accepts the following options: + +```ts +interface PowerSyncCollectionConfig { + // Required options + database: PowerSyncDatabase + table: Table + + // Schema validation and type transformation + schema?: StandardSchemaV1 + deserializationSchema?: StandardSchemaV1 // Required for custom input types + onDeserializationError?: (error: StandardSchemaV1.FailureResult) => void // Required for custom input types + + // Optional Custom serialization + serializer?: { + [Key in keyof TOutput]?: (value: TOutput[Key]) => SQLiteCompatibleType + } + + // Performance tuning + syncBatchSize?: number // Control batch size for initial sync, defaults to 1000 +} +``` + +## Advanced Transactions + +When you need more control over transaction handling, such as batching multiple operations or handling complex transaction scenarios, you can use PowerSync's transaction system directly with TanStack DB transactions. 
+ +```ts +import { createTransaction } from "@tanstack/react-db" +import { PowerSyncTransactor } from "@tanstack/powersync-db-collection" + +// Create a transaction that won't auto-commit +const batchTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + // Use PowerSyncTransactor to apply the transaction to PowerSync + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, +}) + +// Perform multiple operations in the transaction +batchTx.mutate(() => { + // Add multiple documents in a single transaction + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ + id: crypto.randomUUID(), + name: `Document ${i}`, + content: `Content ${i}`, + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }) + } +}) + +// Commit the transaction +await batchTx.commit() + +// Wait for the changes to be persisted +await batchTx.isPersisted.promise +``` + +This approach allows you to: + +- Batch multiple operations into a single transaction +- Control when the transaction is committed +- Ensure all operations are atomic +- Wait for persistence confirmation +- Handle complex transaction scenarios diff --git a/docs/collections/query-collection.md b/docs/collections/query-collection.md index cd4e7eedc..cf13291a8 100644 --- a/docs/collections/query-collection.md +++ b/docs/collections/query-collection.md @@ -9,7 +9,8 @@ Query collections provide seamless integration between TanStack DB and TanStack ## Overview The `@tanstack/query-db-collection` package allows you to create collections that: -- Automatically sync with remote data via TanStack Query + +- Automatically fetch remote data via TanStack Query - Support optimistic updates with automatic rollback on errors - Handle persistence through customizable mutation handlers - Provide direct write capabilities for directly writing to the sync store @@ -23,17 +24,17 @@ npm install @tanstack/query-db-collection @tanstack/query-core 
@tanstack/db ## Basic Usage ```typescript -import { QueryClient } from '@tanstack/query-core' -import { createCollection } from '@tanstack/db' -import { queryCollectionOptions } from '@tanstack/query-db-collection' +import { QueryClient } from "@tanstack/query-core" +import { createCollection } from "@tanstack/db" +import { queryCollectionOptions } from "@tanstack/query-db-collection" const queryClient = new QueryClient() const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: async () => { - const response = await fetch('/api/todos') + const response = await fetch("/api/todos") return response.json() }, queryClient, @@ -55,9 +56,9 @@ The `queryCollectionOptions` function accepts the following options: ### Query Options -- `select`: Function that lets extract array items when they’re wrapped with metadata +- `select`: Function that lets extract array items when they're wrapped with metadata - `enabled`: Whether the query should automatically run (default: `true`) -- `refetchInterval`: Refetch interval in milliseconds +- `refetchInterval`: Refetch interval in milliseconds (default: 0 — set an interval to enable polling refetching) - `retry`: Retry configuration for failed queries - `retryDelay`: Delay between retries - `staleTime`: How long data is considered fresh @@ -76,6 +77,126 @@ The `queryCollectionOptions` function accepts the following options: - `onUpdate`: Handler called before update operations - `onDelete`: Handler called before delete operations +## Extending Meta with Custom Properties + +The `meta` option allows you to pass additional metadata to your query function. By default, Query Collections automatically include `loadSubsetOptions` in the meta object, which contains filtering, sorting, and pagination options for on-demand queries. 
+ +### Type-Safe Meta Access + +The `ctx.meta.loadSubsetOptions` property is automatically typed as `LoadSubsetOptions` without requiring any additional imports or type assertions: + +```typescript +import { parseLoadSubsetOptions } from "@tanstack/query-db-collection" + +const collection = createCollection( + queryCollectionOptions({ + queryKey: ["products"], + syncMode: "on-demand", + queryFn: async (ctx) => { + // ✅ Type-safe access - no @ts-ignore needed! + const options = parseLoadSubsetOptions(ctx.meta?.loadSubsetOptions) + + // Use the parsed options to fetch only what you need + return api.getProducts(options) + }, + queryClient, + getKey: (item) => item.id, + }) +) +``` + +### Adding Custom Meta Properties + +You can extend the meta type to include your own custom properties using TypeScript's module augmentation: + +```typescript +// In a global type definition file (e.g., types.d.ts or global.d.ts) +declare module "@tanstack/query-db-collection" { + interface QueryCollectionMeta { + // Add your custom properties here + userId?: string + includeDeleted?: boolean + cacheTTL?: number + } +} +``` + +Once you've extended the interface, your custom properties are fully typed throughout your application: + +```typescript +const collection = createCollection( + queryCollectionOptions({ + queryKey: ["todos"], + queryFn: async (ctx) => { + // ✅ Both loadSubsetOptions and custom properties are typed + const { loadSubsetOptions, userId, includeDeleted } = ctx.meta + + return api.getTodos({ + ...parseLoadSubsetOptions(loadSubsetOptions), + userId, + includeDeleted, + }) + }, + queryClient, + getKey: (item) => item.id, + // Pass custom meta alongside Query Collection defaults + meta: { + userId: "user-123", + includeDeleted: false, + }, + }) +) +``` + +### Important Notes + +- The module augmentation pattern follows TanStack Query's official approach for typing meta +- `QueryCollectionMeta` is an interface (not a type alias), enabling proper TypeScript declaration 
merging +- Your custom properties are merged with the base `loadSubsetOptions` property +- All meta properties must be compatible with `Record` +- The augmentation should be done in a file that's included in your TypeScript compilation + +### Example: API Request Context + +A common use case is passing request context to your query function: + +```typescript +// types.d.ts +declare module "@tanstack/query-db-collection" { + interface QueryCollectionMeta { + authToken?: string + locale?: string + version?: string + } +} + +// collections.ts +const productsCollection = createCollection( + queryCollectionOptions({ + queryKey: ["products"], + queryFn: async (ctx) => { + const { loadSubsetOptions, authToken, locale, version } = ctx.meta + + return api.getProducts({ + ...parseLoadSubsetOptions(loadSubsetOptions), + headers: { + Authorization: `Bearer ${authToken}`, + "Accept-Language": locale, + "API-Version": version, + }, + }) + }, + queryClient, + getKey: (item) => item.id, + meta: { + authToken: session.token, + locale: "en-US", + version: "v1", + }, + }) +) +``` + ## Persistence Handlers You can define handlers that are called when mutations occur. These handlers can persist changes to your backend and control whether the query should refetch after the operation: @@ -83,30 +204,30 @@ You can define handlers that are called when mutations occur. 
These handlers can ```typescript const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: fetchTodos, queryClient, getKey: (item) => item.id, - + onInsert: async ({ transaction }) => { - const newItems = transaction.mutations.map(m => m.modified) + const newItems = transaction.mutations.map((m) => m.modified) await api.createTodos(newItems) // Returning nothing or { refetch: true } will trigger a refetch // Return { refetch: false } to skip automatic refetch }, - + onUpdate: async ({ transaction }) => { - const updates = transaction.mutations.map(m => ({ + const updates = transaction.mutations.map((m) => ({ id: m.key, - changes: m.changes + changes: m.changes, })) await api.updateTodos(updates) }, - + onDelete: async ({ transaction }) => { - const ids = transaction.mutations.map(m => m.key) + const ids = transaction.mutations.map((m) => m.key) await api.deleteTodos(ids) - } + }, }) ) ``` @@ -119,14 +240,15 @@ You can control this behavior by returning an object with a `refetch` property: ```typescript onInsert: async ({ transaction }) => { - await api.createTodos(transaction.mutations.map(m => m.modified)) - + await api.createTodos(transaction.mutations.map((m) => m.modified)) + // Skip the automatic refetch return { refetch: false } } ``` This is useful when: + - You're confident the server state matches what you sent - You want to avoid unnecessary network requests - You're handling state updates through other mechanisms (like WebSockets) @@ -135,7 +257,10 @@ This is useful when: The collection provides these utility methods via `collection.utils`: -- `refetch()`: Manually trigger a refetch of the query +- `refetch(opts?)`: Manually trigger a refetch of the query + - `opts.throwOnError`: Whether to throw an error if the refetch fails (default: `false`) + - Bypasses `enabled: false` to support imperative/manual refetching patterns (similar to hook `refetch()` behavior) + - Returns 
`QueryObserverResult` for inspecting the result ## Direct Writes @@ -144,10 +269,12 @@ Direct writes are intended for scenarios where the normal query/mutation flow do ### Understanding the Data Stores Query Collections maintain two data stores: + 1. **Synced Data Store** - The authoritative state synchronized with the server via `queryFn` 2. **Optimistic Mutations Store** - Temporary changes that are applied optimistically before server confirmation Normal collection operations (insert, update, delete) create optimistic mutations that are: + - Applied immediately to the UI - Sent to the server via persistence handlers - Rolled back automatically if the server request fails @@ -158,6 +285,7 @@ Direct writes bypass this system entirely and write directly to the synced data ### When to Use Direct Writes Direct writes should be used when: + - You need to sync real-time updates from WebSockets or server-sent events - You're dealing with large datasets where refetching everything is too expensive - You receive incremental updates or server-computed field updates @@ -167,19 +295,28 @@ Direct writes should be used when: ```typescript // Insert a new item directly to the synced data store -todosCollection.utils.writeInsert({ id: '1', text: 'Buy milk', completed: false }) +todosCollection.utils.writeInsert({ + id: "1", + text: "Buy milk", + completed: false, +}) // Update an existing item in the synced data store -todosCollection.utils.writeUpdate({ id: '1', completed: true }) +todosCollection.utils.writeUpdate({ id: "1", completed: true }) // Delete an item from the synced data store -todosCollection.utils.writeDelete('1') +todosCollection.utils.writeDelete("1") // Upsert (insert or update) in the synced data store -todosCollection.utils.writeUpsert({ id: '1', text: 'Buy milk', completed: false }) +todosCollection.utils.writeUpsert({ + id: "1", + text: "Buy milk", + completed: false, +}) ``` These operations: + - Write directly to the synced data store - Do NOT create 
optimistic mutations - Do NOT trigger automatic query refetches @@ -192,10 +329,10 @@ The `writeBatch` method allows you to perform multiple operations atomically. An ```typescript todosCollection.utils.writeBatch(() => { - todosCollection.utils.writeInsert({ id: '1', text: 'Buy milk' }) - todosCollection.utils.writeInsert({ id: '2', text: 'Walk dog' }) - todosCollection.utils.writeUpdate({ id: '3', completed: true }) - todosCollection.utils.writeDelete('4') + todosCollection.utils.writeInsert({ id: "1", text: "Buy milk" }) + todosCollection.utils.writeInsert({ id: "2", text: "Walk dog" }) + todosCollection.utils.writeUpdate({ id: "3", completed: true }) + todosCollection.utils.writeDelete("4") }) ``` @@ -203,17 +340,17 @@ todosCollection.utils.writeBatch(() => { ```typescript // Handle real-time updates from WebSocket without triggering full refetches -ws.on('todos:update', (changes) => { +ws.on("todos:update", (changes) => { todosCollection.utils.writeBatch(() => { - changes.forEach(change => { + changes.forEach((change) => { switch (change.type) { - case 'insert': + case "insert": todosCollection.utils.writeInsert(change.data) break - case 'update': + case "update": todosCollection.utils.writeUpdate(change.data) break - case 'delete': + case "delete": todosCollection.utils.writeDelete(change.id) break } @@ -229,13 +366,13 @@ When the server returns computed fields (like server-generated IDs or timestamps ```typescript const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: fetchTodos, queryClient, getKey: (item) => item.id, onInsert: async ({ transaction }) => { - const newItems = transaction.mutations.map(m => m.modified) + const newItems = transaction.mutations.map((m) => m.modified) // Send to server and get back items with server-computed fields const serverItems = await api.createTodos(newItems) @@ -243,7 +380,7 @@ const todosCollection = createCollection( // Sync server-computed fields (like 
server-generated IDs, timestamps, etc.) // to the collection's synced data store todosCollection.utils.writeBatch(() => { - serverItems.forEach(serverItem => { + serverItems.forEach((serverItem) => { todosCollection.utils.writeInsert(serverItem) }) }) @@ -254,26 +391,26 @@ const todosCollection = createCollection( }, onUpdate: async ({ transaction }) => { - const updates = transaction.mutations.map(m => ({ + const updates = transaction.mutations.map((m) => ({ id: m.key, - changes: m.changes + changes: m.changes, })) const serverItems = await api.updateTodos(updates) // Sync server-computed fields from the update response todosCollection.utils.writeBatch(() => { - serverItems.forEach(serverItem => { + serverItems.forEach((serverItem) => { todosCollection.utils.writeUpdate(serverItem) }) }) return { refetch: false } - } + }, }) ) // Usage is just like a regular collection -todosCollection.insert({ text: 'Buy milk', completed: false }) +todosCollection.insert({ text: "Buy milk", completed: false }) ``` ### Example: Large Dataset Pagination @@ -282,10 +419,10 @@ todosCollection.insert({ text: 'Buy milk', completed: false }) // Load additional pages without refetching existing data const loadMoreTodos = async (page) => { const newTodos = await api.getTodos({ page, limit: 50 }) - + // Add new items without affecting existing ones todosCollection.utils.writeBatch(() => { - newTodos.forEach(todo => { + newTodos.forEach((todo) => { todosCollection.utils.writeInsert(todo) }) }) @@ -318,31 +455,33 @@ Since the query collection expects `queryFn` to return the complete state, you c ```typescript const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: async ({ queryKey }) => { // Get existing data from cache const existingData = queryClient.getQueryData(queryKey) || [] - + // Fetch only new/updated items (e.g., changes since last sync) - const lastSyncTime = localStorage.getItem('todos-last-sync') - const newData 
= await fetch(`/api/todos?since=${lastSyncTime}`).then(r => r.json()) - + const lastSyncTime = localStorage.getItem("todos-last-sync") + const newData = await fetch(`/api/todos?since=${lastSyncTime}`).then( + (r) => r.json() + ) + // Merge new data with existing data - const existingMap = new Map(existingData.map(item => [item.id, item])) - + const existingMap = new Map(existingData.map((item) => [item.id, item])) + // Apply updates and additions - newData.forEach(item => { + newData.forEach((item) => { existingMap.set(item.id, item) }) - + // Handle deletions if your API provides them if (newData.deletions) { - newData.deletions.forEach(id => existingMap.delete(id)) + newData.deletions.forEach((id) => existingMap.delete(id)) } - + // Update sync time - localStorage.setItem('todos-last-sync', new Date().toISOString()) - + localStorage.setItem("todos-last-sync", new Date().toISOString()) + // Return the complete merged state return Array.from(existingMap.values()) }, @@ -353,6 +492,7 @@ const todosCollection = createCollection( ``` This pattern allows you to: + - Fetch only incremental changes from your API - Merge those changes with existing data - Return the complete state that the collection expects @@ -363,6 +503,7 @@ This pattern allows you to: Direct writes update the collection immediately and also update the TanStack Query cache. However, they do not prevent the normal query sync behavior. If your `queryFn` returns data that conflicts with your direct writes, the query data will take precedence. To handle this properly: + 1. Use `{ refetch: false }` in your persistence handlers when using direct writes 2. Set appropriate `staleTime` to prevent unnecessary refetches 3. 
Design your `queryFn` to be aware of incremental updates (e.g., only fetch new data) @@ -376,4 +517,321 @@ All direct write methods are available on `collection.utils`: - `writeDelete(keys)`: Delete one or more items directly - `writeUpsert(data)`: Insert or update one or more items directly - `writeBatch(callback)`: Perform multiple operations atomically -- `refetch()`: Manually trigger a refetch of the query +- `refetch(opts?)`: Manually trigger a refetch of the query + +## QueryFn and Predicate Push-Down + +When using `syncMode: 'on-demand'`, the collection automatically pushes down query predicates (where clauses, orderBy, limit, and offset) to your `queryFn`. This allows you to fetch only the data needed for each specific query, rather than fetching the entire dataset. + +### How LoadSubsetOptions Are Passed + +LoadSubsetOptions are passed to your `queryFn` via the query context's `meta` property: + +```typescript +queryFn: async (ctx) => { + // Extract LoadSubsetOptions from the context + const { limit, offset, where, orderBy } = ctx.meta.loadSubsetOptions + + // Use these to fetch only the data you need + // - where: filter expression (AST) + // - orderBy: sort expression (AST) + // - limit: maximum number of rows + // - offset: number of rows to skip (for pagination) + // ... +} +``` + +The `where` and `orderBy` fields are expression trees (AST - Abstract Syntax Tree) that need to be parsed. TanStack DB provides helper functions to make this easy. + +### Expression Helpers + +```typescript +import { + parseWhereExpression, + parseOrderByExpression, + extractSimpleComparisons, + parseLoadSubsetOptions, +} from '@tanstack/db' +// Or from '@tanstack/query-db-collection' (re-exported for convenience) +``` + +These helpers allow you to parse expression trees without manually traversing complex AST structures. 
+ +### Quick Start: Simple REST API + +```typescript +import { createCollection } from '@tanstack/react-db' +import { queryCollectionOptions } from '@tanstack/query-db-collection' +import { parseLoadSubsetOptions } from '@tanstack/db' +import { QueryClient } from '@tanstack/query-core' + +const queryClient = new QueryClient() + +const productsCollection = createCollection( + queryCollectionOptions({ + id: 'products', + queryKey: ['products'], + queryClient, + getKey: (item) => item.id, + syncMode: 'on-demand', // Enable predicate push-down + + queryFn: async (ctx) => { + const { limit, offset, where, orderBy } = ctx.meta.loadSubsetOptions + + // Parse the expressions into simple format + const parsed = parseLoadSubsetOptions({ where, orderBy, limit }) + + // Build query parameters from parsed filters + const params = new URLSearchParams() + + // Add filters + parsed.filters.forEach(({ field, operator, value }) => { + const fieldName = field.join('.') + if (operator === 'eq') { + params.set(fieldName, String(value)) + } else if (operator === 'lt') { + params.set(`${fieldName}_lt`, String(value)) + } else if (operator === 'gt') { + params.set(`${fieldName}_gt`, String(value)) + } + }) + + // Add sorting + if (parsed.sorts.length > 0) { + const sortParam = parsed.sorts + .map(s => `${s.field.join('.')}:${s.direction}`) + .join(',') + params.set('sort', sortParam) + } + + // Add limit + if (parsed.limit) { + params.set('limit', String(parsed.limit)) + } + + // Add offset for pagination + if (offset) { + params.set('offset', String(offset)) + } + + const response = await fetch(`/api/products?${params}`) + return response.json() + }, + }) +) + +// Usage with live queries +import { createLiveQueryCollection } from '@tanstack/react-db' +import { eq, lt, and } from '@tanstack/db' + +const affordableElectronics = createLiveQueryCollection({ + query: (q) => + q.from({ product: productsCollection }) + .where(({ product }) => and( + eq(product.category, 'electronics'), + 
lt(product.price, 100) + )) + .orderBy(({ product }) => product.price, 'asc') + .limit(10) + .select(({ product }) => product) +}) + +// This triggers a queryFn call with: +// GET /api/products?category=electronics&price_lt=100&sort=price:asc&limit=10 +// When paginating, offset is included: &offset=20 +``` + +### Custom Handlers for Complex APIs + +For APIs with specific formats, use custom handlers: + +```typescript +queryFn: async (ctx) => { + const { where, orderBy, limit } = ctx.meta.loadSubsetOptions + + // Use custom handlers to match your API's format + const filters = parseWhereExpression(where, { + handlers: { + eq: (field, value) => ({ + field: field.join('.'), + op: 'equals', + value + }), + lt: (field, value) => ({ + field: field.join('.'), + op: 'lessThan', + value + }), + and: (...conditions) => ({ + operator: 'AND', + conditions + }), + or: (...conditions) => ({ + operator: 'OR', + conditions + }), + } + }) + + const sorts = parseOrderByExpression(orderBy) + + return api.query({ + filters, + sort: sorts.map(s => ({ + field: s.field.join('.'), + order: s.direction.toUpperCase() + })), + limit + }) +} +``` + +### GraphQL Example + +```typescript +queryFn: async (ctx) => { + const { where, orderBy, limit } = ctx.meta.loadSubsetOptions + + // Convert to a GraphQL where clause format + const whereClause = parseWhereExpression(where, { + handlers: { + eq: (field, value) => ({ + [field.join('_')]: { _eq: value } + }), + lt: (field, value) => ({ + [field.join('_')]: { _lt: value } + }), + and: (...conditions) => ({ _and: conditions }), + or: (...conditions) => ({ _or: conditions }), + } + }) + + // Convert to a GraphQL order_by format + const sorts = parseOrderByExpression(orderBy) + const orderByClause = sorts.map(s => ({ + [s.field.join('_')]: s.direction + })) + + const { data } = await graphqlClient.query({ + query: gql` + query GetProducts($where: product_bool_exp, $orderBy: [product_order_by!], $limit: Int) { + product(where: $where, order_by: 
$orderBy, limit: $limit) { + id + name + category + price + } + } + `, + variables: { + where: whereClause, + orderBy: orderByClause, + limit + } + }) + + return data.product +} +``` + +### Expression Helper API Reference + +#### `parseLoadSubsetOptions(options)` + +Convenience function that parses all LoadSubsetOptions at once. Good for simple use cases. + +```typescript +const { filters, sorts, limit, offset } = parseLoadSubsetOptions(ctx.meta?.loadSubsetOptions) +// filters: [{ field: ['category'], operator: 'eq', value: 'electronics' }] +// sorts: [{ field: ['price'], direction: 'asc', nulls: 'last' }] +// limit: 10 +// offset: 20 (for pagination) +``` + +#### `parseWhereExpression(expr, options)` + +Parses a WHERE expression using custom handlers for each operator. Use this for complete control over the output format. + +```typescript +const filters = parseWhereExpression(where, { + handlers: { + eq: (field, value) => ({ [field.join('.')]: value }), + lt: (field, value) => ({ [`${field.join('.')}_lt`]: value }), + and: (...filters) => Object.assign({}, ...filters) + }, + onUnknownOperator: (operator, args) => { + console.warn(`Unsupported operator: ${operator}`) + return null + } +}) +``` + +#### `parseOrderByExpression(orderBy)` + +Parses an ORDER BY expression into a simple array. + +```typescript +const sorts = parseOrderByExpression(orderBy) +// Returns: [{ field: ['price'], direction: 'asc', nulls: 'last' }] +``` + +#### `extractSimpleComparisons(expr)` + +Extracts simple AND-ed comparisons from a WHERE expression. Note: Only works for simple AND conditions. 
+ +```typescript +const comparisons = extractSimpleComparisons(where) +// Returns: [ +// { field: ['category'], operator: 'eq', value: 'electronics' }, +// { field: ['price'], operator: 'lt', value: 100 } +// ] +``` + +### Supported Operators + +- `eq` - Equality (=) +- `gt` - Greater than (>) +- `gte` - Greater than or equal (>=) +- `lt` - Less than (<) +- `lte` - Less than or equal (<=) +- `and` - Logical AND +- `or` - Logical OR +- `in` - IN clause + +### Using Query Key Builders + +Create different cache entries for different filter combinations: + +```typescript +const productsCollection = createCollection( + queryCollectionOptions({ + id: 'products', + // Dynamic query key based on filters + queryKey: (opts) => { + const parsed = parseLoadSubsetOptions(opts) + const cacheKey = ['products'] + + parsed.filters.forEach(f => { + cacheKey.push(`${f.field.join('.')}-${f.operator}-${f.value}`) + }) + + if (parsed.limit) { + cacheKey.push(`limit-${parsed.limit}`) + } + + return cacheKey + }, + queryClient, + getKey: (item) => item.id, + syncMode: 'on-demand', + queryFn: async (ctx) => { /* ... */ }, + }) +) +``` + +### Tips + +1. **Start with `parseLoadSubsetOptions`** for simple use cases +2. **Use custom handlers** via `parseWhereExpression` for APIs with specific formats +3. **Handle unsupported operators** with the `onUnknownOperator` callback +4. **Log parsed results** during development to verify correctness diff --git a/docs/collections/rxdb-collection.md b/docs/collections/rxdb-collection.md index 71ea3ffed..d139ddc7f 100644 --- a/docs/collections/rxdb-collection.md +++ b/docs/collections/rxdb-collection.md @@ -20,9 +20,9 @@ The `@tanstack/rxdb-db-collection` package allows you to create collections that - Leverage RxDB's [replication plugins](https://rxdb.info/replication.html) to sync with CouchDB, MongoDB, Supabase, REST APIs, GraphQL, WebRTC (P2P) and more. -## 1. Installation +### 1. 
Installation -Install the RXDB collection packages along with your preferred framework integration. +Install the RxDB collection packages along with your preferred framework integration. ```bash npm install @tanstack/rxdb-db-collection rxdb @tanstack/react-db @@ -35,8 +35,8 @@ npm install @tanstack/rxdb-db-collection rxdb @tanstack/react-db import { createRxDatabase, addRxPlugin } from 'rxdb/plugins/core' /** - * Here we use the localstorage based storage for RxDB. - * RxDB has a wide range of storages based on Dexie.js, IndexedDB, SQLite and more. + * Here we use the localStorage based storage for RxDB. + * RxDB has a wide range of storages based on Dexie.js, IndexedDB, SQLite, and more. */ import { getRxStorageLocalstorage } from 'rxdb/plugins/storage-localstorage' @@ -93,7 +93,7 @@ import { rxdbCollectionOptions } from '@tanstack/rxdb-db-collection' const todosCollection = createCollection( rxdbCollectionOptions({ - rxCollection: myDatabase.todos, + rxCollection: db.todos, startSync: true, // start ingesting RxDB data immediately }) ) @@ -118,7 +118,7 @@ The `rxdbCollectionOptions` function accepts the following options: ### Optional - `id`: Unique identifier for the collection -- `schema`: Schema for validating items. RxDB already has schema validation but having additional validation on the TanStack DB side can help to unify error handling between different tanstack collections. +- `schema`: Schema for validating items. RxDB already has schema validation but having additional validation on the TanStack DB side can help to unify error handling between different TanStack collections. - `startSync`: Whether to start syncing immediately (default: true) - `onInsert, onUpdate, onDelete`: Override default persistence handlers. By default, TanStack DB writes are persisted to RxDB using bulkUpsert, patch, and bulkRemove. - `syncBatchSize`: The maximum number of documents fetched per batch during the initial sync from RxDB into TanStack DB (default: 1000). 
Larger values reduce round trips but use more memory; smaller values are lighter but may increase query calls. Note that this only affects the initial sync. Ongoing live updates are streamed one by one via RxDB's change feed. @@ -132,3 +132,36 @@ Replication and sync in RxDB run independently of TanStack DB. You set up replic When replication runs, it pulls and pushes changes to the backend and applies them to the RxDB collection. Since the TanStack DB integration subscribes to the RxDB change stream, any changes applied by replication are automatically reflected in your TanStack DB collection. This separation of concerns means you configure replication entirely in RxDB, and TanStack DB automatically benefits: your TanStack collections always stay up to date with whatever sync strategy you choose. + + +## FAQ + +### Do I still need RxDB schema indexes if I only query TanStack DB? + +Usually not, at least for TanStack DB queries themselves. TanStack DB queries run entirely in memory, so RxDB schema indexes do not affect the performance of TanStack DB's live queries. However, RxDB indexes may still be important if: +- You run queries directly against RxDB (e.g. `rxCollection.find(...)`). +- Your replication setup uses filtered queries or selectors. +- You rely on RxDB to selectively load subsets of data instead of hydrating everything into memory. + +### Is data duplicated between RxDB and TanStack DB? + +Yes, intentionally. RxDB stores data durably on disk. TanStack DB stores data in memory for fast queries and reactivity. This duplication enables high-performance UI queries while retaining [local-first](https://rxdb.info/articles/local-first-future.html) persistence and sync. + +### How does backend ↔ RxDB ↔ TanStack DB synchronization work? + +Synchronization follows a clear separation of responsibilities between RxDB and TanStack DB. + +**RxDB** is responsible for persistence and networking. 
It stores data durably using a local storage engine (IndexedDB, SQLite, etc.) and handles all replication logic. Replication is configured directly on the RxDB collection and runs independently of TanStack DB. RxDB pulls changes from the backend, applies them locally, resolves conflicts, and pushes local changes back to the backend. + +**TanStack DB** sits on top as an in-memory, reactive query layer. It does not talk to the backend directly and does not participate in replication. Instead, it mirrors the current state of the RxDB collection in memory and provides fast live queries and optimistic mutations for the UI. + +This design intentionally forms two independent loops: +- A durability and sync loop managed entirely by RxDB (backend to RxDB). +- A reactive UI loop managed by TanStack DB (RxDB change stream to in-memory collections to live queries). + +## Learn More + +- [RxDB Documentation](https://rxdb.info/overview.html) +- [RxDB Sync Engine](https://rxdb.info/replication.html) +- [Tanstack DB Live Queries](https://tanstack.com/db/latest/docs/guides/live-queries) + diff --git a/docs/collections/trailbase-collection.md b/docs/collections/trailbase-collection.md new file mode 100644 index 000000000..1b1d60d42 --- /dev/null +++ b/docs/collections/trailbase-collection.md @@ -0,0 +1,226 @@ +--- +title: TrailBase Collection +--- + +# TrailBase Collection + +TrailBase collections provide seamless integration between TanStack DB and [TrailBase](https://trailbase.io), enabling real-time data synchronization with TrailBase's self-hosted application backend. + +## Overview + +[TrailBase](https://trailbase.io) is an easy-to-self-host, single-executable application backend with built-in SQLite, a V8 JS runtime, auth, admin UIs and sync functionality. 
+ +The `@tanstack/trailbase-db-collection` package allows you to create collections that: +- Automatically sync data from TrailBase Record APIs +- Support real-time subscriptions when `enable_subscriptions` is enabled +- Handle optimistic updates with automatic rollback on errors +- Provide parse/serialize functions for data transformation + +## Installation + +```bash +npm install @tanstack/trailbase-db-collection @tanstack/react-db trailbase +``` + +## Basic Usage + +```typescript +import { createCollection } from '@tanstack/react-db' +import { trailBaseCollectionOptions } from '@tanstack/trailbase-db-collection' +import { initClient } from 'trailbase' + +const trailBaseClient = initClient(`https://your-trailbase-instance.com`) + +const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + }) +) +``` + +## Configuration Options + +The `trailBaseCollectionOptions` function accepts the following options: + +### Required Options + +- `id`: Unique identifier for the collection +- `recordApi`: TrailBase Record API instance created via `trailBaseClient.records()` +- `getKey`: Function to extract the unique key from an item + +### Optional Options + +- `schema`: [Standard Schema](https://standardschema.dev) compatible schema (e.g., Zod, Effect) for client-side validation +- `parse`: Object mapping field names to parsing functions that transform data coming from TrailBase +- `serialize`: Object mapping field names to serialization functions that transform data going to TrailBase +- `onInsert`: Handler function called when items are inserted +- `onUpdate`: Handler function called when items are updated +- `onDelete`: Handler function called when items are deleted + +## Data Transformation + +TrailBase uses different data formats for storage (e.g., Unix timestamps). 
Use `parse` and `serialize` to handle these transformations: + +```typescript +type SelectTodo = { + id: string + text: string + created_at: number // Unix timestamp from TrailBase + completed: boolean +} + +type Todo = { + id: string + text: string + created_at: Date // JavaScript Date for app usage + completed: boolean +} + +const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + schema: todoSchema, + // Transform TrailBase data to application format + parse: { + created_at: (ts) => new Date(ts * 1000), + }, + // Transform application data to TrailBase format + serialize: { + created_at: (date) => Math.floor(date.valueOf() / 1000), + }, + }) +) +``` + +## Real-time Subscriptions + +TrailBase supports real-time subscriptions when enabled on the server. The collection automatically subscribes to changes and updates in real-time: + +```typescript +const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + // Real-time updates work automatically when + // enable_subscriptions is set in TrailBase config + }) +) + +// Changes from other clients will automatically update +// the collection in real-time +``` + +## Mutation Handlers + +Handle inserts, updates, and deletes by providing mutation handlers: + +```typescript +const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + onInsert: async ({ transaction }) => { + const newTodo = transaction.mutations[0].modified + // TrailBase handles the persistence automatically + // Add custom logic here if needed + }, + onUpdate: async ({ transaction }) => { + const { original, modified } = transaction.mutations[0] + // TrailBase handles the persistence automatically + // Add custom logic here if needed + }, + 
onDelete: async ({ transaction }) => { + const deletedTodo = transaction.mutations[0].original + // TrailBase handles the persistence automatically + // Add custom logic here if needed + }, + }) +) +``` + +## Complete Example + +```typescript +import { createCollection } from '@tanstack/react-db' +import { trailBaseCollectionOptions } from '@tanstack/trailbase-db-collection' +import { initClient } from 'trailbase' +import { z } from 'zod' + +const trailBaseClient = initClient(`https://your-trailbase-instance.com`) + +// Define schema +const todoSchema = z.object({ + id: z.string(), + text: z.string(), + completed: z.boolean(), + created_at: z.date(), +}) + +type SelectTodo = { + id: string + text: string + completed: boolean + created_at: number +} + +type Todo = z.infer + +// Create collection +export const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: 'todos', + recordApi: trailBaseClient.records('todos'), + getKey: (item) => item.id, + schema: todoSchema, + parse: { + created_at: (ts) => new Date(ts * 1000), + }, + serialize: { + created_at: (date) => Math.floor(date.valueOf() / 1000), + }, + onInsert: async ({ transaction }) => { + const newTodo = transaction.mutations[0].modified + console.log('Todo created:', newTodo) + }, + }) +) + +// Use in component +function TodoList() { + const { data: todos } = useLiveQuery((q) => + q.from({ todo: todosCollection }) + .where(({ todo }) => !todo.completed) + .orderBy(({ todo }) => todo.created_at, 'desc') + ) + + const addTodo = (text: string) => { + todosCollection.insert({ + id: crypto.randomUUID(), + text, + completed: false, + created_at: new Date(), + }) + } + + return ( +
+      <ul>
+        {todos.map((todo) => (
+          <li key={todo.id}>{todo.text}</li>
+        ))}
+      </ul>
+ ) +} +``` + +## Learn More + +- [TrailBase Documentation](https://trailbase.io/documentation/) +- [TrailBase Record APIs](https://trailbase.io/documentation/apis_record/) +- [Optimistic Mutations](../guides/mutations.md) +- [Live Queries](../guides/live-queries.md) diff --git a/docs/config.json b/docs/config.json index b16df7560..09ca7c62b 100644 --- a/docs/config.json +++ b/docs/config.json @@ -21,53 +21,6 @@ "label": "Installation", "to": "installation" } - ], - "frameworks": [ - { - "label": "react", - "children": [ - { - "label": "React Adapter", - "to": "framework/react/adapter" - } - ] - }, - { - "label": "solid", - "children": [ - { - "label": "Solid Adapter", - "to": "framework/solid/adapter" - } - ] - }, - { - "label": "vue", - "children": [ - { - "label": "Vue Adapter", - "to": "framework/vue/adapter" - } - ] - }, - { - "label": "svelte", - "children": [ - { - "label": "Svelte Adapter", - "to": "framework/svelte/adapter" - } - ] - }, - { - "label": "angular", - "children": [ - { - "label": "Angular Adapter", - "to": "framework/angular/adapter" - } - ] - } ] }, { @@ -81,6 +34,10 @@ "label": "Mutations", "to": "guides/mutations" }, + { + "label": "Schemas", + "to": "guides/schemas" + }, { "label": "Error Handling", "to": "guides/error-handling" @@ -102,9 +59,50 @@ "label": "Electric Collection", "to": "collections/electric-collection" }, + { + "label": "TrailBase Collection", + "to": "collections/trailbase-collection" + }, { "label": "RxDB Collection", "to": "collections/rxdb-collection" + }, + { + "label": "PowerSync Collection", + "to": "collections/powersync-collection" + }, + { + "label": "LocalStorage Collection", + "to": "collections/local-storage-collection" + }, + { + "label": "LocalOnly Collection", + "to": "collections/local-only-collection" + } + ] + }, + { + "label": "Frameworks", + "children": [ + { + "label": "React", + "to": "framework/react/overview" + }, + { + "label": "Vue", + "to": "framework/vue/overview" + }, + { + "label": "Angular", 
+ "to": "framework/angular/overview" + }, + { + "label": "Solid", + "to": "framework/solid/overview" + }, + { + "label": "Svelte", + "to": "framework/svelte/overview" } ] }, @@ -126,27 +124,27 @@ }, { "label": "Collection", - "to": "reference/interfaces/collection" + "to": "reference/interfaces/Collection" }, { "label": "createCollection", - "to": "reference/functions/createcollection" + "to": "reference/functions/createCollection" }, { "label": "liveQueryCollectionOptions", - "to": "reference/functions/livequerycollectionoptions" + "to": "reference/functions/liveQueryCollectionOptions" }, { "label": "createLiveQueryCollection", - "to": "reference/functions/createlivequerycollection" + "to": "reference/functions/createLiveQueryCollection" }, { "label": "createOptimisticAction", - "to": "reference/functions/createoptimisticaction" + "to": "reference/functions/createOptimisticAction" }, { "label": "createTransaction", - "to": "reference/functions/createtransaction" + "to": "reference/functions/createTransaction" }, { "label": "Electric DB Collection", @@ -154,7 +152,7 @@ }, { "label": "electricCollectionOptions", - "to": "reference/electric-db-collection/functions/electriccollectionoptions" + "to": "reference/electric-db-collection/functions/electricCollectionOptions" }, { "label": "Query DB Collection", @@ -162,7 +160,7 @@ }, { "label": "queryCollectionOptions", - "to": "reference/query-db-collection/functions/querycollectionoptions" + "to": "reference/query-db-collection/functions/queryCollectionOptions" }, { "label": "RxDB DB Collection", @@ -170,7 +168,15 @@ }, { "label": "rxdbCollectionOptions", - "to": "reference/rxdb-db-collection/functions/rxdbcollectionoptions" + "to": "reference/rxdb-db-collection/functions/rxdbCollectionOptions" + }, + { + "label": "PowerSync Collection", + "to": "reference/powersync-db-collection/index" + }, + { + "label": "powerSyncCollectionOptions", + "to": "reference/powersync-db-collection/functions/powerSyncCollectionOptions" } ], 
"frameworks": [ @@ -183,7 +189,7 @@ }, { "label": "useLiveQuery", - "to": "framework/react/reference/functions/uselivequery" + "to": "framework/react/reference/functions/useLiveQuery" } ] }, @@ -196,7 +202,7 @@ }, { "label": "useLiveQuery", - "to": "framework/solid/reference/functions/uselivequery" + "to": "framework/solid/reference/functions/useLiveQuery" } ] }, @@ -209,15 +215,15 @@ }, { "label": "useLiveQuery", - "to": "framework/vue/reference/functions/uselivequery" + "to": "framework/vue/reference/functions/useLiveQuery" }, { "label": "UseLiveQueryReturn", - "to": "framework/vue/reference/interfaces/uselivequeryreturn" + "to": "framework/vue/reference/interfaces/UseLiveQueryReturn" }, { "label": "UseLiveQueryReturnWithCollection", - "to": "framework/vue/reference/interfaces/uselivequeryreturnwithcollection" + "to": "framework/vue/reference/interfaces/UseLiveQueryReturnWithCollection" } ] }, @@ -230,7 +236,7 @@ }, { "label": "injectLiveQuery", - "to": "framework/angular/reference/functions/injectlivequery" + "to": "framework/angular/reference/functions/injectLiveQuery" } ] } diff --git a/docs/framework/angular/adapter.md b/docs/framework/angular/adapter.md deleted file mode 100644 index d4b911614..000000000 --- a/docs/framework/angular/adapter.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: TanStack DB Angular Adapter -id: adapter ---- - -## Installation - -```sh -npm install @tanstack/angular-db -``` - -## Angular inject function - -See the [Angular Functions Reference](../reference/index.md) to see the full list of functions available in the Angular Adapter. 
- -## Basic Usage diff --git a/docs/framework/angular/overview.md b/docs/framework/angular/overview.md new file mode 100644 index 000000000..916262313 --- /dev/null +++ b/docs/framework/angular/overview.md @@ -0,0 +1,212 @@ +--- +title: TanStack DB Angular Adapter +id: adapter +--- + +## Installation + +```sh +npm install @tanstack/angular-db +``` + +## Angular inject function + +See the [Angular Functions Reference](./reference/index.md) to see the full list of functions available in the Angular Adapter. + +For comprehensive documentation on writing queries (filtering, joins, aggregations, etc.), see the [Live Queries Guide](../../guides/live-queries). + +## Basic Usage + +### injectLiveQuery + +The `injectLiveQuery` function creates a live query that automatically updates your component when data changes. It returns an object containing Angular signals for reactive state management: + +```typescript +import { Component } from '@angular/core' +import { injectLiveQuery } from '@tanstack/angular-db' +import { eq } from '@tanstack/db' + +@Component({ + selector: 'app-todo-list', + standalone: true, + template: ` + @if (query.isLoading()) { +
+      <div>Loading...</div>
+    } @else {
+      <ul>
+        @for (todo of query.data(); track todo.id) {
+          <li>{{ todo.text }}</li>
+        }
+      </ul>
+ } + ` +}) +export class TodoListComponent { + query = injectLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) + ) +} +``` + +**Note:** All return values (`data`, `isLoading`, `status`, etc.) are Angular signals, so call them with `()` in your template: `query.data()`, `query.isLoading()`. + +> **Template Syntax:** Examples use Angular 17+ control flow (`@if`, `@for`). For Angular 16, use `*ngIf` and `*ngFor` instead. + +### Reactive Parameters + +For queries that depend on reactive values, use the `params` option to re-run the query when those values change: + +```typescript +import { Component, signal } from '@angular/core' +import { injectLiveQuery } from '@tanstack/angular-db' +import { gt } from '@tanstack/db' + +@Component({ + selector: 'app-filtered-todos', + standalone: true, + template: ` +
+    <div>{{ query.data().length }} high-priority todos</div>
+ ` +}) +export class FilteredTodosComponent { + minPriority = signal(5) + + query = injectLiveQuery({ + params: () => ({ minPriority: this.minPriority() }), + query: ({ params, q }) => + q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, params.minPriority)) + }) +} +``` + +#### When to Use Reactive Parameters + +Use the reactive `params` option when your query depends on: +- Component signals +- Input properties +- Computed values +- Other reactive state + +When any reactive value accessed in the `params` function changes, the query is recreated and re-executed. + +#### What Happens When Parameters Change + +When a parameter value changes: +1. The previous live-query collection is disposed +2. A new query is created with the updated parameter values +3. `status()`/`isLoading()` reflect the new query's lifecycle +4. `data()` updates automatically when the new results arrive + +#### Best Practices + +**Use reactive params for dynamic queries:** + +```typescript +import { Component, Input, signal } from '@angular/core' +import { injectLiveQuery } from '@tanstack/angular-db' +import { eq, and } from '@tanstack/db' + +@Component({ + selector: 'app-todo-list', + standalone: true, + template: `
+    <div>{{ query.data().length }} todos</div>
` +}) +export class TodoListComponent { + // Angular 16+ compatible input + @Input({ required: true }) userId!: number + status = signal('active') + + // Good - reactive params track all dependencies + query = injectLiveQuery({ + params: () => ({ + userId: this.userId, + status: this.status() + }), + query: ({ params, q }) => + q.from({ todos: todosCollection }) + .where(({ todos }) => and( + eq(todos.userId, params.userId), + eq(todos.status, params.status) + )) + }) +} +``` + +**Using Angular 17+ signal inputs:** + +```typescript +import { Component, input, signal } from '@angular/core' +import { injectLiveQuery } from '@tanstack/angular-db' +import { eq, and } from '@tanstack/db' + +@Component({ + selector: 'app-todo-list', + standalone: true, + template: `
+    <div>{{ query.data().length }} todos</div>
` +}) +export class TodoListComponent { + // Angular 17+ signal-based input + userId = input.required() + status = signal('active') + + query = injectLiveQuery({ + params: () => ({ + userId: this.userId(), + status: this.status() + }), + query: ({ params, q }) => + q.from({ todos: todosCollection }) + .where(({ todos }) => and( + eq(todos.userId, params.userId), + eq(todos.status, params.status) + )) + }) +} +``` + +**Static queries don't need params:** + +```typescript +import { Component } from '@angular/core' +import { injectLiveQuery } from '@tanstack/angular-db' + +@Component({ + selector: 'app-all-todos', + standalone: true, + template: `
+    <div>{{ query.data().length }} todos</div>
` +}) +export class AllTodosComponent { + // No reactive dependencies - query never changes + query = injectLiveQuery((q) => + q.from({ todos: todosCollection }) + ) +} +``` + +**Access multiple signals in template:** + +```typescript +import { Component } from '@angular/core' +import { injectLiveQuery } from '@tanstack/angular-db' +import { eq } from '@tanstack/db' + +@Component({ + selector: 'app-todos', + standalone: true, + template: ` +
+    <div>Status: {{ query.status() }}</div>
+    <div>Loading: {{ query.isLoading() }}</div>
+    <div>Ready: {{ query.isReady() }}</div>
+    <div>Total: {{ query.data().length }}</div>
+ ` +}) +export class TodosComponent { + query = injectLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + ) +} +``` diff --git a/docs/framework/angular/reference/functions/injectLiveQuery.md b/docs/framework/angular/reference/functions/injectLiveQuery.md new file mode 100644 index 000000000..46a9a35fb --- /dev/null +++ b/docs/framework/angular/reference/functions/injectLiveQuery.md @@ -0,0 +1,178 @@ +--- +id: injectLiveQuery +title: injectLiveQuery +--- + +# Function: injectLiveQuery() + +## Call Signature + +```ts +function injectLiveQuery(options): InjectLiveQueryResult<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }>; +``` + +Defined in: [index.ts:51](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L51) + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +#### TParams + +`TParams` *extends* `unknown` + +### Parameters + +#### options + +##### params + +() => `TParams` + +##### query + +(`args`) => `QueryBuilder`\<`TContext`\> + +### Returns + +[`InjectLiveQueryResult`](../interfaces/InjectLiveQueryResult.md)\<\{ \[K in string \| number \| symbol\]: (TContext\["result"\] extends object ? any\[any\] : TContext\["hasJoins"\] extends true ? TContext\["schema"\] : TContext\["schema"\]\[TContext\["fromSourceName"\]\])\[K\] \}\> + +## Call Signature + +```ts +function injectLiveQuery(options): InjectLiveQueryResult<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? 
TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }>; +``` + +Defined in: [index.ts:61](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L61) + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +#### TParams + +`TParams` *extends* `unknown` + +### Parameters + +#### options + +##### params + +() => `TParams` + +##### query + +(`args`) => `QueryBuilder`\<`TContext`\> \| `null` \| `undefined` + +### Returns + +[`InjectLiveQueryResult`](../interfaces/InjectLiveQueryResult.md)\<\{ \[K in string \| number \| symbol\]: (TContext\["result"\] extends object ? any\[any\] : TContext\["hasJoins"\] extends true ? TContext\["schema"\] : TContext\["schema"\]\[TContext\["fromSourceName"\]\])\[K\] \}\> + +## Call Signature + +```ts +function injectLiveQuery(queryFn): InjectLiveQueryResult<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }>; +``` + +Defined in: [index.ts:71](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L71) + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### queryFn + +(`q`) => `QueryBuilder`\<`TContext`\> + +### Returns + +[`InjectLiveQueryResult`](../interfaces/InjectLiveQueryResult.md)\<\{ \[K in string \| number \| symbol\]: (TContext\["result"\] extends object ? any\[any\] : TContext\["hasJoins"\] extends true ? TContext\["schema"\] : TContext\["schema"\]\[TContext\["fromSourceName"\]\])\[K\] \}\> + +## Call Signature + +```ts +function injectLiveQuery(queryFn): InjectLiveQueryResult<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? 
TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }>; +``` + +Defined in: [index.ts:74](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L74) + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### queryFn + +(`q`) => `QueryBuilder`\<`TContext`\> \| `null` \| `undefined` + +### Returns + +[`InjectLiveQueryResult`](../interfaces/InjectLiveQueryResult.md)\<\{ \[K in string \| number \| symbol\]: (TContext\["result"\] extends object ? any\[any\] : TContext\["hasJoins"\] extends true ? TContext\["schema"\] : TContext\["schema"\]\[TContext\["fromSourceName"\]\])\[K\] \}\> + +## Call Signature + +```ts +function injectLiveQuery(config): InjectLiveQueryResult<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }>; +``` + +Defined in: [index.ts:79](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L79) + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### config + +`LiveQueryCollectionConfig`\<`TContext`\> + +### Returns + +[`InjectLiveQueryResult`](../interfaces/InjectLiveQueryResult.md)\<\{ \[K in string \| number \| symbol\]: (TContext\["result"\] extends object ? any\[any\] : TContext\["hasJoins"\] extends true ? 
TContext\["schema"\] : TContext\["schema"\]\[TContext\["fromSourceName"\]\])\[K\] \}\> + +## Call Signature + +```ts +function injectLiveQuery(liveQueryCollection): InjectLiveQueryResult; +``` + +Defined in: [index.ts:82](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L82) + +### Type Parameters + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### liveQueryCollection + +`Collection`\<`TResult`, `TKey`, `TUtils`\> + +### Returns + +[`InjectLiveQueryResult`](../interfaces/InjectLiveQueryResult.md)\<`TResult`, `TKey`, `TUtils`\> diff --git a/docs/framework/angular/reference/index.md b/docs/framework/angular/reference/index.md new file mode 100644 index 000000000..1321a5528 --- /dev/null +++ b/docs/framework/angular/reference/index.md @@ -0,0 +1,14 @@ +--- +id: "@tanstack/angular-db" +title: "@tanstack/angular-db" +--- + +# @tanstack/angular-db + +## Interfaces + +- [InjectLiveQueryResult](interfaces/InjectLiveQueryResult.md) + +## Functions + +- [injectLiveQuery](functions/injectLiveQuery.md) diff --git a/docs/framework/angular/reference/interfaces/InjectLiveQueryResult.md b/docs/framework/angular/reference/interfaces/InjectLiveQueryResult.md new file mode 100644 index 000000000..2e00287c2 --- /dev/null +++ b/docs/framework/angular/reference/interfaces/InjectLiveQueryResult.md @@ -0,0 +1,136 @@ +--- +id: InjectLiveQueryResult +title: InjectLiveQueryResult +--- + +# Interface: InjectLiveQueryResult\ + +Defined in: [index.ts:26](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L26) + +The result of calling `injectLiveQuery`. +Contains reactive signals for the query state and data. 
+ +## Type Parameters + +### TResult + +`TResult` *extends* `object` = `any` + +### TKey + +`TKey` *extends* `string` \| `number` = `string` \| `number` + +### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> = \{ +\} + +## Properties + +### collection + +```ts +collection: Signal< + | Collection, TResult> +| null>; +``` + +Defined in: [index.ts:36](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L36) + +A signal containing the underlying collection instance (null for disabled queries) + +*** + +### data + +```ts +data: Signal; +``` + +Defined in: [index.ts:34](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L34) + +A signal containing the results as an array + +*** + +### isCleanedUp + +```ts +isCleanedUp: Signal; +``` + +Defined in: [index.ts:48](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L48) + +A signal indicating whether the collection has been cleaned up + +*** + +### isError + +```ts +isError: Signal; +``` + +Defined in: [index.ts:46](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L46) + +A signal indicating whether the collection has an error + +*** + +### isIdle + +```ts +isIdle: Signal; +``` + +Defined in: [index.ts:44](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L44) + +A signal indicating whether the collection is idle + +*** + +### isLoading + +```ts +isLoading: Signal; +``` + +Defined in: [index.ts:40](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L40) + +A signal indicating whether the collection is currently loading + +*** + +### isReady + +```ts +isReady: Signal; +``` + +Defined in: [index.ts:42](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L42) + +A signal indicating whether the collection is ready + +*** + +### state + +```ts +state: Signal>; +``` + +Defined in: 
[index.ts:32](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L32) + +A signal containing the complete state map of results keyed by their ID + +*** + +### status + +```ts +status: Signal; +``` + +Defined in: [index.ts:38](https://github.com/TanStack/db/blob/main/packages/angular-db/src/index.ts#L38) + +A signal containing the current status of the collection diff --git a/docs/framework/react/adapter.md b/docs/framework/react/adapter.md deleted file mode 100644 index 7a3b17c60..000000000 --- a/docs/framework/react/adapter.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: TanStack DB React Adapter -id: adapter ---- - -## Installation - -```sh -npm install @tanstack/react-db -``` - -## React Hooks - -See the [React Functions Reference](../reference/index.md) to see the full list of hooks available in the React Adapter. - -## Basic Usage diff --git a/docs/framework/react/overview.md b/docs/framework/react/overview.md new file mode 100644 index 000000000..ed96a7506 --- /dev/null +++ b/docs/framework/react/overview.md @@ -0,0 +1,163 @@ +--- +title: TanStack DB React Adapter +id: adapter +--- + +## Installation + +```sh +npm install @tanstack/react-db +``` + +## React Hooks + +See the [React Functions Reference](./reference/index.md) to see the full list of hooks available in the React Adapter. + +For comprehensive documentation on writing queries (filtering, joins, aggregations, etc.), see the [Live Queries Guide](../../guides/live-queries). + +## Basic Usage + +### useLiveQuery + +The `useLiveQuery` hook creates a live query that automatically updates your component when data changes: + +```tsx +import { useLiveQuery } from '@tanstack/react-db' + +function TodoList() { + const { data, isLoading } = useLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) + ) + + if (isLoading) return
<div>Loading...</div>
+
+  return (
+    <ul>
+      {data.map(todo => <li key={todo.id}>{todo.text}</li>)}
+    </ul>
+ ) +} +``` + +### Dependency Arrays + +All query hooks (`useLiveQuery`, `useLiveInfiniteQuery`, `useLiveSuspenseQuery`) accept an optional dependency array as their last parameter. This array works similarly to React's `useEffect` dependencies - when any value in the array changes, the query is recreated and re-executed. + +#### When to Use Dependency Arrays + +Use dependency arrays when your query depends on external reactive values (props, state, or other hooks): + +```tsx +function FilteredTodos({ minPriority }: { minPriority: number }) { + const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-run when minPriority changes + ) + + return
<div>{data.length} high-priority todos</div>
+} +``` + +#### What Happens When Dependencies Change + +When a dependency value changes: +1. The previous live query collection is cleaned up +2. A new query is created with the updated values +3. The component re-renders with the new data +4. The hook suspends (for `useLiveSuspenseQuery`) or shows loading state + +#### Best Practices + +**Include all external values used in the query:** + +```tsx +// Good - all external values in deps +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => and( + eq(todos.userId, userId), + eq(todos.status, status) + )), + [userId, status] +) + +// Bad - missing dependencies +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.userId, userId)), + [] // Missing userId! +) +``` + +**Empty array for static queries:** + +```tsx +// No external dependencies - query never changes +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }), + [] +) +``` + +**Omit the array for queries with no external dependencies:** + +```tsx +// Same as above - no deps needed +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) +) +``` + +### useLiveInfiniteQuery + +For paginated data with live updates, use `useLiveInfiniteQuery`: + +```tsx +const { data, pages, fetchNextPage, hasNextPage } = useLiveInfiniteQuery( + (q) => q + .from({ posts: postsCollection }) + .where(({ posts }) => eq(posts.category, category)) + .orderBy(({ posts }) => posts.createdAt, 'desc'), + { + pageSize: 20, + getNextPageParam: (lastPage, allPages) => + lastPage.length === 20 ? allPages.length : undefined + }, + [category] // Re-run when category changes +) +``` + +**Note:** The dependency array is only available when using the query function variant, not when passing a pre-created collection. 
+ +### useLiveSuspenseQuery + +For React Suspense integration, use `useLiveSuspenseQuery`: + +```tsx +function TodoList({ filter }: { filter: string }) { + const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.filter, filter)), + [filter] // Re-suspends when filter changes + ) + + return ( +
    <ul>
+      {data.map(todo => <li key={todo.id}>{todo.text}</li>)}
+    </ul>
+ ) +} + +function App() { + return ( + Loading...}> + + + ) +} +``` + +When dependencies change, `useLiveSuspenseQuery` will re-suspend, showing your Suspense fallback until the new data is ready. diff --git a/docs/framework/react/reference/functions/useLiveInfiniteQuery.md b/docs/framework/react/reference/functions/useLiveInfiniteQuery.md new file mode 100644 index 000000000..8fe91f730 --- /dev/null +++ b/docs/framework/react/reference/functions/useLiveInfiniteQuery.md @@ -0,0 +1,217 @@ +--- +id: useLiveInfiniteQuery +title: useLiveInfiniteQuery +--- + +# Function: useLiveInfiniteQuery() + +## Call Signature + +```ts +function useLiveInfiniteQuery(liveQueryCollection, config): UseLiveInfiniteQueryReturn; +``` + +Defined in: [useLiveInfiniteQuery.ts:113](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L113) + +Create an infinite query using a query function with live updates + +Uses `utils.setWindow()` to dynamically adjust the limit/offset window +without recreating the live query collection on each page change. 
+ +### Type Parameters + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### liveQueryCollection + +`Collection`\<`TResult`, `TKey`, `TUtils`, `StandardSchemaV1`\<`unknown`, `unknown`\>, `TResult`\> & `NonSingleResult` + +#### config + +[`UseLiveInfiniteQueryConfig`](../type-aliases/UseLiveInfiniteQueryConfig.md)\<`any`\> + +Configuration including pageSize and getNextPageParam + +### Returns + +[`UseLiveInfiniteQueryReturn`](../type-aliases/UseLiveInfiniteQueryReturn.md)\<`any`\> + +Object with pages, data, and pagination controls + +### Examples + +```ts +// Basic infinite query +const { data, pages, fetchNextPage, hasNextPage } = useLiveInfiniteQuery( + (q) => q + .from({ posts: postsCollection }) + .orderBy(({ posts }) => posts.createdAt, 'desc') + .select(({ posts }) => ({ + id: posts.id, + title: posts.title + })), + { + pageSize: 20, + getNextPageParam: (lastPage, allPages) => + lastPage.length === 20 ? allPages.length : undefined + } +) +``` + +```ts +// With dependencies +const { pages, fetchNextPage } = useLiveInfiniteQuery( + (q) => q + .from({ posts: postsCollection }) + .where(({ posts }) => eq(posts.category, category)) + .orderBy(({ posts }) => posts.createdAt, 'desc'), + { + pageSize: 10, + getNextPageParam: (lastPage) => + lastPage.length === 10 ? lastPage.length : undefined + }, + [category] +) +``` + +```ts +// Router loader pattern with pre-created collection +// In loader: +const postsQuery = createLiveQueryCollection({ + query: (q) => q + .from({ posts: postsCollection }) + .orderBy(({ posts }) => posts.createdAt, 'desc') + .limit(20) +}) +await postsQuery.preload() +return { postsQuery } + +// In component: +const { postsQuery } = useLoaderData() +const { data, fetchNextPage, hasNextPage } = useLiveInfiniteQuery( + postsQuery, + { + pageSize: 20, + getNextPageParam: (lastPage) => lastPage.length === 20 ? 
lastPage.length : undefined + } +) +``` + +## Call Signature + +```ts +function useLiveInfiniteQuery( + queryFn, + config, +deps?): UseLiveInfiniteQueryReturn; +``` + +Defined in: [useLiveInfiniteQuery.ts:123](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveInfiniteQuery.ts#L123) + +Create an infinite query using a query function with live updates + +Uses `utils.setWindow()` to dynamically adjust the limit/offset window +without recreating the live query collection on each page change. + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### queryFn + +(`q`) => `QueryBuilder`\<`TContext`\> + +Query function that defines what data to fetch. Must include `.orderBy()` for setWindow to work. + +#### config + +[`UseLiveInfiniteQueryConfig`](../type-aliases/UseLiveInfiniteQueryConfig.md)\<`TContext`\> + +Configuration including pageSize and getNextPageParam + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +[`UseLiveInfiniteQueryReturn`](../type-aliases/UseLiveInfiniteQueryReturn.md)\<`TContext`\> + +Object with pages, data, and pagination controls + +### Examples + +```ts +// Basic infinite query +const { data, pages, fetchNextPage, hasNextPage } = useLiveInfiniteQuery( + (q) => q + .from({ posts: postsCollection }) + .orderBy(({ posts }) => posts.createdAt, 'desc') + .select(({ posts }) => ({ + id: posts.id, + title: posts.title + })), + { + pageSize: 20, + getNextPageParam: (lastPage, allPages) => + lastPage.length === 20 ? allPages.length : undefined + } +) +``` + +```ts +// With dependencies +const { pages, fetchNextPage } = useLiveInfiniteQuery( + (q) => q + .from({ posts: postsCollection }) + .where(({ posts }) => eq(posts.category, category)) + .orderBy(({ posts }) => posts.createdAt, 'desc'), + { + pageSize: 10, + getNextPageParam: (lastPage) => + lastPage.length === 10 ? 
lastPage.length : undefined + }, + [category] +) +``` + +```ts +// Router loader pattern with pre-created collection +// In loader: +const postsQuery = createLiveQueryCollection({ + query: (q) => q + .from({ posts: postsCollection }) + .orderBy(({ posts }) => posts.createdAt, 'desc') + .limit(20) +}) +await postsQuery.preload() +return { postsQuery } + +// In component: +const { postsQuery } = useLoaderData() +const { data, fetchNextPage, hasNextPage } = useLiveInfiniteQuery( + postsQuery, + { + pageSize: 20, + getNextPageParam: (lastPage) => lastPage.length === 20 ? lastPage.length : undefined + } +) +``` diff --git a/docs/framework/react/reference/functions/useLiveQuery.md b/docs/framework/react/reference/functions/useLiveQuery.md new file mode 100644 index 000000000..84162b674 --- /dev/null +++ b/docs/framework/react/reference/functions/useLiveQuery.md @@ -0,0 +1,1251 @@ +--- +id: useLiveQuery +title: useLiveQuery +--- + +# Function: useLiveQuery() + +## Call Signature + +```ts +function useLiveQuery(queryFn, deps?): object; +``` + +Defined in: [useLiveQuery.ts:84](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L84) + +Create a live query using a query function + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### queryFn + +(`q`) => `QueryBuilder`\<`TContext`\> + +Query function that defines what data to fetch + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: Collection<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? 
TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }, string | number, { +}>; +``` + +#### data + +```ts +data: InferResultType; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: true; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: Map; +``` + +#### status + +```ts +status: CollectionStatus; +``` + +### Examples + +```ts +// Basic query with object syntax +const { data, isLoading } = useLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) +) +``` + +```ts +// Single result query +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +``` + +```ts +// With dependencies that trigger re-execution +const { data, state } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-run when minPriority changes +) +``` + +```ts +// Join pattern +const { data } = useLiveQuery((q) => + q.from({ issues: issueCollection }) + .join({ persons: personCollection }, ({ issues, persons }) => + eq(issues.userId, persons.id) + ) + .select(({ issues, persons }) => ({ + id: issues.id, + title: issues.title, + userName: persons.name + })) +) +``` + +```ts +// Handle loading and error states +const { data, isLoading, isError, status } = useLiveQuery((q) => + q.from({ todos: todoCollection }) +) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Error: {status}</div>
+
+return (
+  <ul>
+    {data.map(todo => <li key={todo.id}>{todo.text}</li>)}
+  </ul>
+) +``` + +## Call Signature + +```ts +function useLiveQuery(queryFn, deps?): object; +``` + +Defined in: [useLiveQuery.ts:101](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L101) + +Create a live query using a query function + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### queryFn + +(`q`) => `QueryBuilder`\<`TContext`\> \| `null` \| `undefined` + +Query function that defines what data to fetch + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: + | Collection<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }, string | number, { +}, StandardSchemaV1, { [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? 
TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }> + | undefined; +``` + +#### data + +```ts +data: InferResultType | undefined; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: boolean; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: + | Map + | undefined; +``` + +#### status + +```ts +status: UseLiveQueryStatus; +``` + +### Examples + +```ts +// Basic query with object syntax +const { data, isLoading } = useLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) +) +``` + +```ts +// Single result query +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +``` + +```ts +// With dependencies that trigger re-execution +const { data, state } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-run when minPriority changes +) +``` + +```ts +// Join pattern +const { data } = useLiveQuery((q) => + q.from({ issues: issueCollection }) + .join({ persons: personCollection }, ({ issues, persons }) => + eq(issues.userId, persons.id) + ) + .select(({ issues, persons }) => ({ + id: issues.id, + title: issues.title, + userName: persons.name + })) +) +``` + +```ts +// Handle loading and error states +const { data, isLoading, isError, status } = useLiveQuery((q) => + q.from({ todos: todoCollection }) +) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Error: {status}</div>
+
+return (
+  <ul>
+    {data.map(todo => <li key={todo.id}>{todo.text}</li>)}
+  </ul>
+) +``` + +## Call Signature + +```ts +function useLiveQuery(queryFn, deps?): object; +``` + +Defined in: [useLiveQuery.ts:120](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L120) + +Create a live query using a query function + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### queryFn + +(`q`) => + \| `LiveQueryCollectionConfig`\<`TContext`, \{ \[K in string \| number \| symbol\]: (TContext\["result"\] extends object ? any\[any\] : TContext\["hasJoins"\] extends true ? TContext\["schema"\] : TContext\["schema"\]\[TContext\["fromSourceName"\]\])\[K\] \} & `object`\> + \| `null` + \| `undefined` + +Query function that defines what data to fetch + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: + | Collection<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }, string | number, { +}, StandardSchemaV1, { [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? 
TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }> + | undefined; +``` + +#### data + +```ts +data: InferResultType | undefined; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: boolean; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: + | Map + | undefined; +``` + +#### status + +```ts +status: UseLiveQueryStatus; +``` + +### Examples + +```ts +// Basic query with object syntax +const { data, isLoading } = useLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) +) +``` + +```ts +// Single result query +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +``` + +```ts +// With dependencies that trigger re-execution +const { data, state } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-run when minPriority changes +) +``` + +```ts +// Join pattern +const { data } = useLiveQuery((q) => + q.from({ issues: issueCollection }) + .join({ persons: personCollection }, ({ issues, persons }) => + eq(issues.userId, persons.id) + ) + .select(({ issues, persons }) => ({ + id: issues.id, + title: issues.title, + userName: persons.name + })) +) +``` + +```ts +// Handle loading and error states +const { data, isLoading, isError, status } = useLiveQuery((q) => + q.from({ todos: todoCollection }) +) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Error: {status}</div>
+
+return (
+  <ul>
+    {data.map(todo => <li key={todo.id}>{todo.text}</li>)}
+  </ul>
+) +``` + +## Call Signature + +```ts +function useLiveQuery(queryFn, deps?): object; +``` + +Defined in: [useLiveQuery.ts:139](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L139) + +Create a live query using a query function + +### Type Parameters + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### queryFn + +(`q`) => + \| `Collection`\<`TResult`, `TKey`, `TUtils`, `StandardSchemaV1`\<`unknown`, `unknown`\>, `TResult`\> + \| `null` + \| `undefined` + +Query function that defines what data to fetch + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: + | Collection, TResult> + | undefined; +``` + +#### data + +```ts +data: TResult[] | undefined; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: boolean; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: Map | undefined; +``` + +#### status + +```ts +status: UseLiveQueryStatus; +``` + +### Examples + +```ts +// Basic query with object syntax +const { data, isLoading } = useLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) +) +``` + +```ts +// Single result query +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +``` + +```ts +// With dependencies that trigger re-execution +const { data, state } = useLiveQuery( + (q) => q.from({ todos: 
todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-run when minPriority changes +) +``` + +```ts +// Join pattern +const { data } = useLiveQuery((q) => + q.from({ issues: issueCollection }) + .join({ persons: personCollection }, ({ issues, persons }) => + eq(issues.userId, persons.id) + ) + .select(({ issues, persons }) => ({ + id: issues.id, + title: issues.title, + userName: persons.name + })) +) +``` + +```ts +// Handle loading and error states +const { data, isLoading, isError, status } = useLiveQuery((q) => + q.from({ todos: todoCollection }) +) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Error: {status}</div>
+
+return (
+  <ul>
+    {data.map(todo => <li key={todo.id}>{todo.text}</li>)}
+  </ul>
+) +``` + +## Call Signature + +```ts +function useLiveQuery(queryFn, deps?): object; +``` + +Defined in: [useLiveQuery.ts:162](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L162) + +Create a live query using a query function + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### queryFn + +(`q`) => + \| `QueryBuilder`\<`TContext`\> + \| `LiveQueryCollectionConfig`\<`TContext`, \{ \[K in string \| number \| symbol\]: (TContext\["result"\] extends object ? any\[any\] : TContext\["hasJoins"\] extends true ? TContext\["schema"\] : TContext\["schema"\]\[TContext\["fromSourceName"\]\])\[K\] \} & `object`\> + \| `Collection`\<`TResult`, `TKey`, `TUtils`, `StandardSchemaV1`\<`unknown`, `unknown`\>, `TResult`\> + \| `null` + \| `undefined` + +Query function that defines what data to fetch + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: + | Collection, TResult> + | Collection<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }, string | number, { +}, StandardSchemaV1, { [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? 
TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }> + | undefined; +``` + +#### data + +```ts +data: InferResultType | TResult[] | undefined; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: boolean; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: + | Map + | Map + | undefined; +``` + +#### status + +```ts +status: UseLiveQueryStatus; +``` + +### Examples + +```ts +// Basic query with object syntax +const { data, isLoading } = useLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) +) +``` + +```ts +// Single result query +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +``` + +```ts +// With dependencies that trigger re-execution +const { data, state } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-run when minPriority changes +) +``` + +```ts +// Join pattern +const { data } = useLiveQuery((q) => + q.from({ issues: issueCollection }) + .join({ persons: personCollection }, ({ issues, persons }) => + eq(issues.userId, persons.id) + ) + .select(({ issues, persons }) => ({ + id: issues.id, + title: issues.title, + userName: persons.name + })) +) +``` + +```ts +// Handle loading and error states +const { data, isLoading, isError, status } = useLiveQuery((q) => + q.from({ todos: todoCollection }) +) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Error: {status}</div>
+
+return (
+  <ul>
+    {data.map(todo => <li key={todo.id}>{todo.text}</li>)}
+  </ul>
+) +``` + +## Call Signature + +```ts +function useLiveQuery(config, deps?): object; +``` + +Defined in: [useLiveQuery.ts:230](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L230) + +Create a live query using configuration object + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### config + +`LiveQueryCollectionConfig`\<`TContext`\> + +Configuration object with query and options + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: Collection<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }, string | number, { +}>; +``` + +#### data + +```ts +data: InferResultType; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: true; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: Map; +``` + +#### status + +```ts +status: CollectionStatus; +``` + +### Examples + +```ts +// Basic config object usage +const { data, status } = useLiveQuery({ + query: (q) => q.from({ todos: todosCollection }), + gcTime: 60000 +}) +``` + +```ts +// With query builder and options +const queryBuilder = new Query() + .from({ persons: collection }) + .where(({ persons }) => gt(persons.age, 30)) + .select(({ persons }) => ({ id: persons.id, name: persons.name })) + +const { data, isReady } = useLiveQuery({ query: queryBuilder }) +``` + +```ts +// Handle all states uniformly +const { data, isLoading, isReady, isError } = useLiveQuery({ + query: (q) => q.from({ items: itemCollection }) 
+}) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Something went wrong</div>
+if (!isReady) return <div>Preparing...</div>
+
+return <div>{data.length} items loaded</div>
+``` + +## Call Signature + +```ts +function useLiveQuery(liveQueryCollection): object; +``` + +Defined in: [useLiveQuery.ts:276](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L276) + +Subscribe to an existing live query collection + +### Type Parameters + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### liveQueryCollection + +`Collection`\<`TResult`, `TKey`, `TUtils`, `StandardSchemaV1`\<`unknown`, `unknown`\>, `TResult`\> & `NonSingleResult` + +Pre-created live query collection to subscribe to + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: Collection; +``` + +#### data + +```ts +data: TResult[]; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: true; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: Map; +``` + +#### status + +```ts +status: CollectionStatus; +``` + +### Examples + +```ts +// Using pre-created live query collection +const myLiveQuery = createLiveQueryCollection((q) => + q.from({ todos: todosCollection }).where(({ todos }) => eq(todos.active, true)) +) +const { data, collection } = useLiveQuery(myLiveQuery) +``` + +```ts +// Access collection methods directly +const { data, collection, isReady } = useLiveQuery(existingCollection) + +// Use collection for mutations +const handleToggle = (id) => { + collection.update(id, draft => { draft.completed = !draft.completed }) +} +``` + +```ts +// Handle states consistently +const { data, isLoading, isError } = useLiveQuery(sharedCollection) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Error loading data</div>
+
+return <ul>{data.map(item => <Item key={item.id} item={item} />)}</ul>
+``` + +## Call Signature + +```ts +function useLiveQuery(liveQueryCollection): object; +``` + +Defined in: [useLiveQuery.ts:296](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveQuery.ts#L296) + +Create a live query using a query function + +### Type Parameters + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### liveQueryCollection + +`Collection`\<`TResult`, `TKey`, `TUtils`, `StandardSchemaV1`\<`unknown`, `unknown`\>, `TResult`\> & `SingleResult` + +### Returns + +`object` + +Object with reactive data, state, and status information + +#### collection + +```ts +collection: Collection, TResult> & SingleResult; +``` + +#### data + +```ts +data: TResult | undefined; +``` + +#### isCleanedUp + +```ts +isCleanedUp: boolean; +``` + +#### isEnabled + +```ts +isEnabled: true; +``` + +#### isError + +```ts +isError: boolean; +``` + +#### isIdle + +```ts +isIdle: boolean; +``` + +#### isLoading + +```ts +isLoading: boolean; +``` + +#### isReady + +```ts +isReady: boolean; +``` + +#### state + +```ts +state: Map; +``` + +#### status + +```ts +status: CollectionStatus; +``` + +### Examples + +```ts +// Basic query with object syntax +const { data, isLoading } = useLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) +) +``` + +```ts +// Single result query +const { data } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +``` + +```ts +// With dependencies that trigger re-execution +const { data, state } = useLiveQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-run when minPriority changes +) +``` + +```ts +// Join pattern +const { data } = 
useLiveQuery((q) => + q.from({ issues: issueCollection }) + .join({ persons: personCollection }, ({ issues, persons }) => + eq(issues.userId, persons.id) + ) + .select(({ issues, persons }) => ({ + id: issues.id, + title: issues.title, + userName: persons.name + })) +) +``` + +```ts +// Handle loading and error states +const { data, isLoading, isError, status } = useLiveQuery((q) => + q.from({ todos: todoCollection }) +) + +if (isLoading) return
<div>Loading...</div>
+if (isError) return <div>Error: {status}</div>
+
+return (
+  <ul>
+    {data.map(todo => (
+      <li key={todo.id}>{todo.text}</li>
+    ))}
+  </ul>
+) +``` diff --git a/docs/framework/react/reference/functions/useLiveSuspenseQuery.md b/docs/framework/react/reference/functions/useLiveSuspenseQuery.md new file mode 100644 index 000000000..94c00a9fd --- /dev/null +++ b/docs/framework/react/reference/functions/useLiveSuspenseQuery.md @@ -0,0 +1,626 @@ +--- +id: useLiveSuspenseQuery +title: useLiveSuspenseQuery +--- + +# Function: useLiveSuspenseQuery() + +## Call Signature + +```ts +function useLiveSuspenseQuery(queryFn, deps?): object; +``` + +Defined in: [useLiveSuspenseQuery.ts:109](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveSuspenseQuery.ts#L109) + +Create a live query with React Suspense support + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### queryFn + +(`q`) => `QueryBuilder`\<`TContext`\> + +Query function that defines what data to fetch + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data and state - data is guaranteed to be defined + +#### collection + +```ts +collection: Collection<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }, string | number, { +}>; +``` + +#### data + +```ts +data: InferResultType; +``` + +#### state + +```ts +state: Map; +``` + +### Throws + +Promise when data is loading (caught by Suspense boundary) + +### Throws + +Error when collection fails (caught by Error boundary) + +### Examples + +```ts +// Basic usage with Suspense +function TodoList() { + const { data } = useLiveSuspenseQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) + ) + + return ( +
    <ul>
+      {data.map(todo => (
+        <li key={todo.id}>{todo.text}</li>
+      ))}
+    </ul>
+ ) +} + +function App() { + return ( + Loading...}> + + + ) +} +``` + +```ts +// Single result query +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +// data is guaranteed to be the single item (or undefined if not found) +``` + +```ts +// With dependencies that trigger re-suspension +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-suspends when minPriority changes +) +``` + +```ts +// With Error boundary +function App() { + return ( + Error loading data}> + Loading...}> + + + + ) +} +``` + +### Remarks + +**Important:** This hook does NOT support disabled queries (returning undefined/null). +Following TanStack Query's useSuspenseQuery design, the query callback must always +return a valid query, collection, or config object. + +❌ **This will cause a type error:** +```ts +useLiveSuspenseQuery( + (q) => userId ? q.from({ users }) : undefined // ❌ Error! +) +``` + +✅ **Use conditional rendering instead:** +```ts +function Profile({ userId }: { userId: string }) { + const { data } = useLiveSuspenseQuery( + (q) => q.from({ users }).where(({ users }) => eq(users.id, userId)) + ) + return
<div>{data.name}</div>
+}
+
+// In parent component:
+{userId ? <Profile userId={userId} /> : <div>No user</div>
} +``` + +✅ **Or use useLiveQuery for conditional queries:** +```ts +const { data, isEnabled } = useLiveQuery( + (q) => userId ? q.from({ users }) : undefined, // ✅ Supported! + [userId] +) +``` + +## Call Signature + +```ts +function useLiveSuspenseQuery(config, deps?): object; +``` + +Defined in: [useLiveSuspenseQuery.ts:119](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveSuspenseQuery.ts#L119) + +Create a live query with React Suspense support + +### Type Parameters + +#### TContext + +`TContext` *extends* `Context` + +### Parameters + +#### config + +`LiveQueryCollectionConfig`\<`TContext`\> + +#### deps? + +`unknown`[] + +Array of dependencies that trigger query re-execution when changed + +### Returns + +`object` + +Object with reactive data and state - data is guaranteed to be defined + +#### collection + +```ts +collection: Collection<{ [K in string | number | symbol]: (TContext["result"] extends object ? any[any] : TContext["hasJoins"] extends true ? TContext["schema"] : TContext["schema"][TContext["fromSourceName"]])[K] }, string | number, { +}>; +``` + +#### data + +```ts +data: InferResultType; +``` + +#### state + +```ts +state: Map; +``` + +### Throws + +Promise when data is loading (caught by Suspense boundary) + +### Throws + +Error when collection fails (caught by Error boundary) + +### Examples + +```ts +// Basic usage with Suspense +function TodoList() { + const { data } = useLiveSuspenseQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) + ) + + return ( +
    <ul>
+      {data.map(todo => (
+        <li key={todo.id}>{todo.text}</li>
+      ))}
+    </ul>
+ ) +} + +function App() { + return ( + Loading...}> + + + ) +} +``` + +```ts +// Single result query +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +// data is guaranteed to be the single item (or undefined if not found) +``` + +```ts +// With dependencies that trigger re-suspension +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-suspends when minPriority changes +) +``` + +```ts +// With Error boundary +function App() { + return ( + Error loading data}> + Loading...}> + + + + ) +} +``` + +### Remarks + +**Important:** This hook does NOT support disabled queries (returning undefined/null). +Following TanStack Query's useSuspenseQuery design, the query callback must always +return a valid query, collection, or config object. + +❌ **This will cause a type error:** +```ts +useLiveSuspenseQuery( + (q) => userId ? q.from({ users }) : undefined // ❌ Error! +) +``` + +✅ **Use conditional rendering instead:** +```ts +function Profile({ userId }: { userId: string }) { + const { data } = useLiveSuspenseQuery( + (q) => q.from({ users }).where(({ users }) => eq(users.id, userId)) + ) + return
<div>{data.name}</div>
+}
+
+// In parent component:
+{userId ? <Profile userId={userId} /> : <div>No user</div>
} +``` + +✅ **Or use useLiveQuery for conditional queries:** +```ts +const { data, isEnabled } = useLiveQuery( + (q) => userId ? q.from({ users }) : undefined, // ✅ Supported! + [userId] +) +``` + +## Call Signature + +```ts +function useLiveSuspenseQuery(liveQueryCollection): object; +``` + +Defined in: [useLiveSuspenseQuery.ts:129](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveSuspenseQuery.ts#L129) + +Create a live query with React Suspense support + +### Type Parameters + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### liveQueryCollection + +`Collection`\<`TResult`, `TKey`, `TUtils`, `StandardSchemaV1`\<`unknown`, `unknown`\>, `TResult`\> & `NonSingleResult` + +### Returns + +`object` + +Object with reactive data and state - data is guaranteed to be defined + +#### collection + +```ts +collection: Collection; +``` + +#### data + +```ts +data: TResult[]; +``` + +#### state + +```ts +state: Map; +``` + +### Throws + +Promise when data is loading (caught by Suspense boundary) + +### Throws + +Error when collection fails (caught by Error boundary) + +### Examples + +```ts +// Basic usage with Suspense +function TodoList() { + const { data } = useLiveSuspenseQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) + ) + + return ( +
    <ul>
+      {data.map(todo => (
+        <li key={todo.id}>{todo.text}</li>
+      ))}
+    </ul>
+ ) +} + +function App() { + return ( + Loading...}> + + + ) +} +``` + +```ts +// Single result query +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +// data is guaranteed to be the single item (or undefined if not found) +``` + +```ts +// With dependencies that trigger re-suspension +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-suspends when minPriority changes +) +``` + +```ts +// With Error boundary +function App() { + return ( + Error loading data}> + Loading...}> + + + + ) +} +``` + +### Remarks + +**Important:** This hook does NOT support disabled queries (returning undefined/null). +Following TanStack Query's useSuspenseQuery design, the query callback must always +return a valid query, collection, or config object. + +❌ **This will cause a type error:** +```ts +useLiveSuspenseQuery( + (q) => userId ? q.from({ users }) : undefined // ❌ Error! +) +``` + +✅ **Use conditional rendering instead:** +```ts +function Profile({ userId }: { userId: string }) { + const { data } = useLiveSuspenseQuery( + (q) => q.from({ users }).where(({ users }) => eq(users.id, userId)) + ) + return
<div>{data.name}</div>
+}
+
+// In parent component:
+{userId ? <Profile userId={userId} /> : <div>No user</div>
} +``` + +✅ **Or use useLiveQuery for conditional queries:** +```ts +const { data, isEnabled } = useLiveQuery( + (q) => userId ? q.from({ users }) : undefined, // ✅ Supported! + [userId] +) +``` + +## Call Signature + +```ts +function useLiveSuspenseQuery(liveQueryCollection): object; +``` + +Defined in: [useLiveSuspenseQuery.ts:142](https://github.com/TanStack/db/blob/main/packages/react-db/src/useLiveSuspenseQuery.ts#L142) + +Create a live query with React Suspense support + +### Type Parameters + +#### TResult + +`TResult` *extends* `object` + +#### TKey + +`TKey` *extends* `string` \| `number` + +#### TUtils + +`TUtils` *extends* `Record`\<`string`, `any`\> + +### Parameters + +#### liveQueryCollection + +`Collection`\<`TResult`, `TKey`, `TUtils`, `StandardSchemaV1`\<`unknown`, `unknown`\>, `TResult`\> & `SingleResult` + +### Returns + +`object` + +Object with reactive data and state - data is guaranteed to be defined + +#### collection + +```ts +collection: Collection, TResult> & SingleResult; +``` + +#### data + +```ts +data: TResult | undefined; +``` + +#### state + +```ts +state: Map; +``` + +### Throws + +Promise when data is loading (caught by Suspense boundary) + +### Throws + +Error when collection fails (caught by Error boundary) + +### Examples + +```ts +// Basic usage with Suspense +function TodoList() { + const { data } = useLiveSuspenseQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) + .select(({ todos }) => ({ id: todos.id, text: todos.text })) + ) + + return ( +
    <ul>
+      {data.map(todo => (
+        <li key={todo.id}>{todo.text}</li>
+      ))}
+    </ul>
+ ) +} + +function App() { + return ( + Loading...}> + + + ) +} +``` + +```ts +// Single result query +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.id, 1)) + .findOne() +) +// data is guaranteed to be the single item (or undefined if not found) +``` + +```ts +// With dependencies that trigger re-suspension +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, minPriority)), + [minPriority] // Re-suspends when minPriority changes +) +``` + +```ts +// With Error boundary +function App() { + return ( + Error loading data}> + Loading...}> + + + + ) +} +``` + +### Remarks + +**Important:** This hook does NOT support disabled queries (returning undefined/null). +Following TanStack Query's useSuspenseQuery design, the query callback must always +return a valid query, collection, or config object. + +❌ **This will cause a type error:** +```ts +useLiveSuspenseQuery( + (q) => userId ? q.from({ users }) : undefined // ❌ Error! +) +``` + +✅ **Use conditional rendering instead:** +```ts +function Profile({ userId }: { userId: string }) { + const { data } = useLiveSuspenseQuery( + (q) => q.from({ users }).where(({ users }) => eq(users.id, userId)) + ) + return
<div>{data.name}</div>
+}
+
+// In parent component:
+{userId ? <Profile userId={userId} /> : <div>No user</div>
} +``` + +✅ **Or use useLiveQuery for conditional queries:** +```ts +const { data, isEnabled } = useLiveQuery( + (q) => userId ? q.from({ users }) : undefined, // ✅ Supported! + [userId] +) +``` diff --git a/docs/framework/react/reference/functions/usePacedMutations.md b/docs/framework/react/reference/functions/usePacedMutations.md new file mode 100644 index 000000000..4f50b0411 --- /dev/null +++ b/docs/framework/react/reference/functions/usePacedMutations.md @@ -0,0 +1,131 @@ +--- +id: usePacedMutations +title: usePacedMutations +--- + +# Function: usePacedMutations() + +```ts +function usePacedMutations(config): (variables) => Transaction; +``` + +Defined in: [usePacedMutations.ts:93](https://github.com/TanStack/db/blob/main/packages/react-db/src/usePacedMutations.ts#L93) + +React hook for managing paced mutations with timing strategies. + +Provides optimistic mutations with pluggable strategies like debouncing, +queuing, or throttling. The optimistic updates are applied immediately via +`onMutate`, and the actual persistence is controlled by the strategy. 
+ +## Type Parameters + +### TVariables + +`TVariables` = `unknown` + +### T + +`T` *extends* `object` = `Record`\<`string`, `unknown`\> + +## Parameters + +### config + +`PacedMutationsConfig`\<`TVariables`, `T`\> + +Configuration including onMutate, mutationFn and strategy + +## Returns + +A mutate function that accepts variables and returns Transaction objects + +```ts +(variables): Transaction; +``` + +### Parameters + +#### variables + +`TVariables` + +### Returns + +`Transaction`\<`T`\> + +## Examples + +```tsx +// Debounced auto-save +function AutoSaveForm({ formId }: { formId: string }) { + const mutate = usePacedMutations({ + onMutate: (value) => { + // Apply optimistic update immediately + formCollection.update(formId, draft => { + draft.content = value + }) + }, + mutationFn: async ({ transaction }) => { + await api.save(transaction.mutations) + }, + strategy: debounceStrategy({ wait: 500 }) + }) + + const handleChange = async (value: string) => { + const tx = mutate(value) + + // Optional: await persistence or handle errors + try { + await tx.isPersisted.promise + console.log('Saved!') + } catch (error) { + console.error('Save failed:', error) + } + } + + return