diff --git a/.github/ISSUE_TEMPLATE/issue.md b/.github/ISSUE_TEMPLATE/issue.md index f5b6cfebc5..c816876020 100644 --- a/.github/ISSUE_TEMPLATE/issue.md +++ b/.github/ISSUE_TEMPLATE/issue.md @@ -15,7 +15,7 @@ Write the description of the issue here ### Info - Environment: (Node.js/browser/hybrid app/etc.) - Platform: (Chrome/FF/Safari/Edge/iOS/Android/etc.) -- Adapter: (idb/indexeddb/memory/leveldb/etc.) +- Adapter: (idb/indexeddb/nodesqlite/etc.) - Server: (CouchDB/Cloudant/PouchDB Server/etc.) ### Reproduce diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 71a503c937..bbc6cb53ec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -69,7 +69,7 @@ jobs: fail-fast: false matrix: couchdb: ['2.3', '3.1'] - node: [20, 22] + node: [22] cmd: - npm test - TYPE=find PLUGINS=pouchdb-find ADAPTERS=http npm test @@ -158,60 +158,6 @@ jobs: if: steps.retry.outcome == 'failure' run: ${{ matrix.cmd }} - # Run the integration, find and mapreduce tests against all the Node.js - # PouchDB adapters. This should be run for every adapter on every version of - # Node.js we support. 
- - nodejs-adapter: - needs: lint - strategy: - fail-fast: false - matrix: - node: [20, 22] - adapter: ['leveldb', 'memory'] - cmd: - - npm test - - TYPE=find PLUGINS=pouchdb-find npm test - - TYPE=mapreduce npm test - runs-on: ubuntu-latest - services: - couchdb: - image: couchdb:3.1 - ports: - - 5984:5984 - env: - COUCHDB_USER: admin - COUCHDB_PASSWORD: password - env: - CLIENT: node - SERVER: couchdb-master - COUCH_HOST: http://admin:password@127.0.0.1:5984 - SKIP_MIGRATION: 1 - ADAPTERS: ${{ matrix.adapter }} - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Wait for CouchDB - run: ./bin/wait-for-couch.sh 20 - - name: Setup CouchDB CORS - run: curl 'http://admin:password@127.0.0.1:5984/_node/_local/_config/cors/origins' -X PUT -d '"http://127.0.0.1:8000"' - - uses: ./.github/actions/install-node-package - with: - node-version: ${{ matrix.node }} - - uses: ./.github/actions/build-pouchdb - - id: test - run: ${{ matrix.cmd }} - continue-on-error: true - - name: First retry - id: retry - if: steps.test.outcome == 'failure' - run: ${{ matrix.cmd }} - continue-on-error: true - - name: Second retry - if: steps.retry.outcome == 'failure' - run: ${{ matrix.cmd }} - # Run the integration, find and mapreduce tests against all the browser-based # adapters. PouchDB adapters. This should be run for every adapter on every # target browser. 
+      - uses: actions/checkout@v4
+backends known as _adapters_. In the browser these are `idb` and `indexeddb`, and on Node.js there is `nodesqlite`.
In the -browser they're `idb`, `indexeddb` and `memory`. +On Node.js the available adapter is `nodesqlite`. In the +browser they're `idb` and `indexeddb`. ## Other sets of tests @@ -213,8 +212,8 @@ These suites support most of the same options as the integration tests. You'll want to test specific adapters by specifying them on the command-line, for example: - # run the "find" tests with the memory client on node.js - $ TYPE=find PLUGINS=pouchdb-find CLIENT=node ADAPTERS=memory npm test + # run the "find" tests with the nodesqlite client on node.js + $ TYPE=find PLUGINS=pouchdb-find CLIENT=node ADAPTERS=nodesqlite npm test # run the "mapreduce" tests with indexeddb in firefox $ TYPE=mapreduce CLIENT=firefox ADAPTERS=indexeddb npm test diff --git a/bin/build-module.js b/bin/build-module.js index edd10dcf2b..6cf62b6112 100755 --- a/bin/build-module.js +++ b/bin/build-module.js @@ -31,8 +31,7 @@ var AGGRESSIVELY_BUNDLED_PACKAGES = var BROWSER_ONLY_PACKAGES = ['pouchdb-browser']; // packages that only use the browser field to ignore dependencies -var BROWSER_DEPENDENCY_ONLY_PACKAGES = - ['pouchdb-adapter-leveldb']; +var BROWSER_DEPENDENCY_ONLY_PACKAGES = ['pouchdb-adapter-nodesqlite']; function buildModule(filepath) { var pkg = require(path.resolve(filepath, 'package.json')); diff --git a/bin/build-pouchdb.js b/bin/build-pouchdb.js index 94fbfddc80..259fc7fdc2 100755 --- a/bin/build-pouchdb.js +++ b/bin/build-pouchdb.js @@ -27,7 +27,7 @@ var builtInModules = require('builtin-modules'); var external = Object.keys(require('../package.json').dependencies) .concat(builtInModules); -var plugins = ['indexeddb', 'localstorage', 'memory', 'find']; +var plugins = ['indexeddb', 'find']; var currentYear = new Date().getFullYear(); @@ -42,24 +42,6 @@ var comments = { 'indexeddb': '// PouchDB indexeddb plugin ' + version + '\n', - 'memory': '// PouchDB in-memory plugin ' + version + - '\n// Based on MemDOWN: https://github.com/rvagg/memdown' + - '\n// ' + - '\n// (c) 2012-' + 
+  if [ "$CLIENT" = "node" ]; then
getReqs('pouchdb-node/lib/index.js').should.not.contain('pouchdb-core'); -getReqs('pouchdb-node/lib/index.js').should.contain('leveldown'); getReqs('pouchdb-browser/lib/index.js').should.not.contain('pouchdb-core'); // pouchdb-for-coverage is super-duper aggressively bundled diff --git a/docs/_includes/api/batch_fetch.html b/docs/_includes/api/batch_fetch.html index ce970a9407..5ae0a481d7 100644 --- a/docs/_includes/api/batch_fetch.html +++ b/docs/_includes/api/batch_fetch.html @@ -17,7 +17,7 @@ * `options.startkey` & `options.endkey`: Get documents with IDs in a certain range (inclusive/inclusive). * `options.inclusive_end`: Include documents having an ID equal to the given `options.endkey`. Default: `true`. * `options.limit`: Maximum number of documents to return. -* `options.skip`: Number of docs to skip before returning (warning: poor performance on IndexedDB/LevelDB!). +* `options.skip`: Number of docs to skip before returning (warning: poor performance on IndexedDB!). * `options.descending`: Reverse the order of the output documents. Note that the order of `startkey` and `endkey` is reversed when `descending`:`true`. * `options.key`: Only return documents with IDs matching this string key. * `options.keys`: Array of string keys to fetch in a single shot. diff --git a/docs/_includes/api/create_database.html b/docs/_includes/api/create_database.html index f29d88f2e7..72de7a0dfb 100644 --- a/docs/_includes/api/create_database.html +++ b/docs/_includes/api/create_database.html @@ -13,7 +13,7 @@ **Options for local databases:** * `auto_compaction`: This turns on auto compaction, which means `compact()` is called after every change to the database. Defaults to `false`. -* `adapter`: One of `'indexeddb'`, `'idb'`, `'leveldb'`, or `'http'`. +* `adapter`: One of `'indexeddb'`, `'idb'`, `'nodesqlite'`, or `'http'`. * `revs_limit`: Specify how many old revisions we keep track (not a copy) of. 
Specifying a low value means Pouch may not be able to figure out whether a new revision received via replication is related to any it currently has which could result in a conflict. Defaults to `1000`. * `deterministic_revs`: Use a md5 hash to create a deterministic revision number for documents. Setting it to false will mean that the revision number will be a random UUID. Defaults to true. * `view_update_changes_batch_size`: Specify how many change records will be consumed at a time when rebuilding view indexes when the `query()` method is used. Defaults to 50. @@ -30,12 +30,9 @@ 1. In IndexedDB PouchDB will use `_pouch_` to prefix the internal database names. Do not manually create databases with the same prefix. 2. When acting as a client on Node, any other options given will be passed to [request][]. -3. When using the `'leveldb'` adapter (the default on Node), any other options given will be passed to [levelup][]. -4. If you are using the jwt auth handler, please use the fetch option to add the required headers and handle connected logic like token refreshes. +3. If you are using the jwt auth handler, please use the fetch option to add the required headers and handle connected logic like token refreshes. [request]: https://github.com/mikeal/request -[levelup]: https://github.com/rvagg/node-levelup -[levelup_options]: https://github.com/rvagg/node-levelup/#options #### Example Usage: {% highlight "js" %} diff --git a/docs/_includes/api/database_information.html b/docs/_includes/api/database_information.html index a37926fd95..e073994411 100644 --- a/docs/_includes/api/database_information.html +++ b/docs/_includes/api/database_information.html @@ -54,6 +54,6 @@ There are also some details you can use for debugging. These are unofficial and may change at any time: -* `adapter`: The name of the adapter being used (idb, leveldb, ...). +* `adapter`: The name of the adapter being used (idb, ...). 
* `idb_attachment_format`: (IndexedDB) either `'base64'` or `'binary'`, depending on whether the browser [supports binary blobs](/faq.html#data_types). * `backend_adapter`: (Node.JS) the backend *DOWN adapter being used (MemDOWN, RiakDOWN, ...). diff --git a/docs/_includes/api/defaults.html b/docs/_includes/api/defaults.html index 8012d10cdd..aa2e4713d5 100644 --- a/docs/_includes/api/defaults.html +++ b/docs/_includes/api/defaults.html @@ -23,7 +23,7 @@ const MyPrefixedPouch = PouchDB.defaults({ prefix: '/path/to/my/db/' }); -// db will be named '/path/to/my/db/dbname', useful for LevelDB +// db will be named '/path/to/my/db/dbname' const myPrefixedPouch = new MyPrefixedPouch('dbname'); const HTTPPouch = PouchDB.defaults({ diff --git a/docs/_includes/api/query_database.html b/docs/_includes/api/query_database.html index 0895330a56..999a864946 100644 --- a/docs/_includes/api/query_database.html +++ b/docs/_includes/api/query_database.html @@ -41,7 +41,7 @@ * `options.startkey` & `options.endkey`: Get rows with keys in a certain range (inclusive/inclusive). * `options.inclusive_end`: Include rows having a key equal to the given `options.endkey`. Default: `true`. * `options.limit`: Maximum number of rows to return. -* `options.skip`: Number of rows to skip before returning (warning: poor performance on IndexedDB/LevelDB!). +* `options.skip`: Number of rows to skip before returning (warning: poor performance on IndexedDB!). * `options.descending`: Reverse the order of the output rows. * `options.key`: Only return rows matching this key. * `options.keys`: Array of keys to fetch in a single shot. diff --git a/docs/_layouts/default.html b/docs/_layouts/default.html index 63670b0804..e5056183e2 100644 --- a/docs/_layouts/default.html +++ b/docs/_layouts/default.html @@ -122,20 +122,6 @@

{{ sub_title}}

-
- - Node Levelup - -
-
adapters @@ -66,127 +66,17 @@ The built-in IndexedDB adapter is nearly always more performant and stable. PouchDB also offers separate browser plugins that use backends other than IndexedDB. These plugins fully pass the PouchDB test suite and are rigorously tested in our CI process. -**Downloads:** - -* [pouchdb.memory.js](https://github.com/apache/pouchdb/releases/download/{{ site.version }}/pouchdb.memory.js) (Minified: [pouchdb.memory.min.js](https://github.com/apache/pouchdb/releases/download/{{ site.version }}/pouchdb.memory.min.js)) -* [pouchdb.localstorage.js](https://github.com/apache/pouchdb/releases/download/{{ site.version }}/pouchdb.localstorage.js) (Minified: [pouchdb.localstorage.min.js](https://github.com/apache/pouchdb/releases/download/{{ site.version }}/pouchdb.localstorage.min.js)) - -{% include alert/start.html variant="warning"%} -{% markdown %} -These plugins add a hefty footprint due to external dependencies, so take them with a grain of salt. -{% endmarkdown %} -{% include alert/end.html%} - -#### In-memory adapter - -If you want a quick database for your unit tests, you can use the `pouchdb.memory.js` plugin, which offers a pure in-memory PouchDB: - -```html - - - -``` - -This pouch will act exactly like a normal one – replicating, storing attachments, pagination, etc. – but it will be deleted as soon as the user closes their browser. 
+You can also use PouchDB with Node.js' [native SQLite module](https://nodejs.org/api/sqlite.html) when using Node.js version 22.5.0 or later.
```js const PouchDB = require('pouchdb'); -PouchDB.plugin(require('pouchdb-adapter-node-websql')); - -const db = new PouchDB('mydatabase.db', {adapter: 'websql'}); -``` - -In this case, PouchDB is directly using SQLite queries to build the database, exactly as the WebSQL adapter would. - -See ["Prebuilt databases with PouchDB"]({{ site.baseurl }}/2016/04/28/prebuilt-databases-with-pouchdb.html) -for a guide to how you might use this adapter to create prebuilt SQLite database files for adapters such as Cordova or Electron. +PouchDB.plugin(require('pouchdb-adapter-node-sqlite')); -#### Other LevelDOWN adapters - -Technically you are free to use -[any LevelDOWN-based implementation](https://github.com/rvagg/node-levelup/wiki/Modules#storage-back-ends) in either Node or the browser. -However this should be considered **extremely experimental** and not designed for production use. - -See [pouchdb-adapter-leveldb-core](https://www.npmjs.com/package/pouchdb-adapter-leveldb-core) for details. - -{% include anchor.html title="PouchDB over HTTP" hash="pouchdb_over_http"%} - -In both the browser and in Node.js, PouchDB can also function as a straightforward API on top of any [CouchDB](https://couchdb.apache.org/)-compliant database: - -```js -const pouch = new PouchDB('http://my-site.com:5984/my-db'); -const securePouch = new PouchDB('https://my-secure-site.com:5984/my-secure-db'); +const db = new PouchDB('mydatabase.db', {adapter: 'nodesqlite'}); ``` -You can also sync to and from these databases to your local PouchDB. 
- -Currently PouchDB has full support for: - -* CouchDB 1.x -* [Smileupps](https://www.smileupps.com/) (same as 1.x) -* CouchDB 2.x ([tested in CI](https://github.com/apache/pouchdb/actions)) -* CouchDB 3.x ([tested in CI](https://github.com/apache/pouchdb/actions)) -* [Cloudant](https://cloudant.com/) (roughly the same as 2.x) -* [PouchDB Server](https://github.com/pouchdb/pouchdb-server) ([tested in CI](https://github.com/apache/pouchdb/actions)) -* [PouchDB Server --in-memory mode](https://github.com/pouchdb/pouchdb-server) - -[Drupal 8](http://wearepropeople.com/blog/a-content-staging-solution-for-drupal-8-and-more) has also announced support for PouchDB, and there is [rcouch](https://github.com/rcouch/rcouch) as well, but these are both untested by PouchDB. - -If you are ever unsure about a server, consider replicating from PouchDB to CouchDB, then from that CouchDB to the other server. - #### PouchDB Server [PouchDB Server](https://github.com/pouchdb/pouchdb-server) is a standalone REST server that implements the CouchDB API, while using a LevelDB-based PouchDB under the hood. It also supports an `--in-memory` mode and any [LevelDOWN][] adapter, which you may find handy. @@ -207,7 +97,6 @@ The best place to look for information on which browsers support which databases [IndexedDB]: http://www.w3.org/TR/IndexedDB/ [WebSQL]: http://www.w3.org/TR/webdatabase/ -[LevelDB]: https://code.google.com/p/leveldb/ [LocalStorage]: https://developer.mozilla.org/en-US/docs/Web/Guide/API/DOM/Storage [es5-shims]: https://github.com/es-shims/es5-shim [sqlite plugin]: https://github.com/brodysoft/Cordova-SQLitePlugin diff --git a/docs/custom.md b/docs/custom.md index fd48700f1d..021c06f9b1 100644 --- a/docs/custom.md +++ b/docs/custom.md @@ -60,7 +60,7 @@ for the browser. In particular, it ships with the IndexedDB adapter as its default adapter. It also contains the replication, HTTP, and map/reduce plugins. 
Use this preset if you only want to use PouchDB in the browser, -and don't want to use it in Node.js. (E.g. to avoid installing LevelDB.) +and don't want to use it in Node.js. (E.g. to avoid installing nodesqlite.) #### Example Usage @@ -86,7 +86,7 @@ const PouchDB = require('pouchdb-core') ### [pouchdb-node](https://npmjs.org/package/pouchdb-node) The `pouchdb-node` preset contains the version of PouchDB that is designed for -Node.js. In particular, it uses the LevelDB adapter and doesn't ship with the +Node.js. In particular, it uses the nodesqlite adapter and doesn't ship with the IndexedDB or WebSQL adapters. It also contains the replication, HTTP, and map/reduce plugins. Use this preset if you are only using PouchDB in Node, and not in the browser. @@ -106,7 +106,6 @@ const db = new PouchDB('mydb'); ```js const PouchDB = require('pouchdb-core') - .plugin(require('pouchdb-adapter-leveldb')) .plugin(require('pouchdb-adapter-http')) .plugin(require('pouchdb-mapreduce')) .plugin(require('pouchdb-replication')); @@ -133,7 +132,7 @@ PouchDB.plugin(/* attach plugins to make me more interesting! */); Plugins contain functionality that can be added to a `PouchDB` instance using `PouchDB.plugin()`. There are many [third-party plugins](/external.html), but the ones described below are first-party plugins, which are given the same level of support as PouchDB itself. Some first-party plugins are included in the default `pouchdb` build, whereas others aren't. -There is also a special type of plugin called an _adapter plugin_. Adapter plugins (such as IndexedDB, WebSQL, LevelDB, and HTTP) determine the storage format that +There is also a special type of plugin called an _adapter plugin_. Adapter plugins (such as IndexedDB, WebSQL, and HTTP) determine the storage format that PouchDB uses. For the non-HTTP adapters, the plugin order matters, i.e. if you want IndexedDB to be preferred to WebSQL, then you should load it first. 
@@ -173,23 +172,6 @@ const db = new PouchDB('mydb', {adapter: 'websql'}); console.log(db.adapter); // 'websql' ``` -### [pouchdb-adapter-leveldb](https://npmjs.org/package/pouchdb-adapter-leveldb) - -The primary adapter used by PouchDB in Node.js, using LevelDB. The adapter name -is `'leveldb'`. - -#### Example usage - -```bash -npm install pouchdb-adapter-leveldb -``` - -```js -PouchDB.plugin(require('pouchdb-adapter-leveldb')); -const db = new PouchDB('mydb', {adapter: 'leveldb'}); -console.log(db.adapter); // 'leveldb' -``` - ### [pouchdb-adapter-http](https://npmjs.org/package/pouchdb-adapter-http) The primary adapter used by PouchDB in both Node.js and the browser for communicating @@ -211,38 +193,21 @@ const db = new PouchDB('http://127.0.0.1:5984/mydb'); console.log(db.adapter); // 'http' ``` -### [pouchdb-adapter-memory](https://npmjs.org/package/pouchdb-adapter-memory) - -An optional adapter that works in the browser and Node.js, fully in-memory. The adapter name -is `'memory'`. - -#### Example usage - -```bash -npm install pouchdb-adapter-memory -``` - -```js -PouchDB.plugin(require('pouchdb-adapter-memory')); -const db = new PouchDB('mydb', {adapter: 'memory'}); -console.log(db.adapter); // 'memory' -``` - -### [pouchdb-adapter-localstorage](https://npmjs.org/package/pouchdb-adapter-localstorage) +### [pouchdb-adapter-fruitdown](https://npmjs.org/package/pouchdb-adapter-fruitdown) -An optional adapter that works in the browser using LocalStorage. The adapter name -is `'localstorage'`. +An optional adapter that works in the browser using IndexedDB via [fruitdown](https://github.com/nolanlawson/fruitdown). The adapter name +is `'fruitdown'`. 
#### Example usage ```bash -npm install pouchdb-adapter-localstorage +npm install pouchdb-adapter-fruitdown ``` ```js -PouchDB.plugin(require('pouchdb-adapter-localstorage')); -const db = new PouchDB('mydb', {adapter: 'localstorage'}); -console.log(db.adapter); // 'localstorage' +PouchDB.plugin(require('pouchdb-adapter-fruitdown')); +const db = new PouchDB('mydb', {adapter: 'fruitdown'}); +console.log(db.adapter); // 'fruitdown' ``` ### [pouchdb-adapter-node-websql](https://npmjs.org/package/pouchdb-adapter-node-websql) diff --git a/docs/faq.md b/docs/faq.md index b277e6d58e..dd00fd74f7 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -63,7 +63,7 @@ PouchDB has two types of data: documents and attachments. As in CouchDB, the documents you store must be serializable as JSON. Modifying the `Object` prototype or storing classes is not supported. -IndexedDB will actually support non-JSON data (e.g. `Date`s aren't stringified), but you should not rely on this, because CouchDB, LevelDB, and Web SQL do not behave the same. +IndexedDB will actually support non-JSON data (e.g. `Date`s aren't stringified), but you should not rely on this, because CouchDB and Web SQL do not behave the same. #### Attachments diff --git a/docs/guides/databases.md b/docs/guides/databases.md index bab8a1adb4..3ade28fe17 100644 --- a/docs/guides/databases.md +++ b/docs/guides/databases.md @@ -130,7 +130,7 @@ In Safari, you can simply click *Safari* → *Clear History and Website Data {% include anchor.html title="Differences between the local and remote databases" hash="differences-between-the-local-and-remote-databases" %} -When you create a local PouchDB database, it uses whatever underlying datastore is available - IndexedDB in most browsers, WebSQL in older browsers, and LevelDB in Node.js. +When you create a local PouchDB database, it uses whatever underlying datastore is available - IndexedDB in most browsers, WebSQL in older browsers, and nodesqlite in Node.js. 
+In Node.js, PouchDB uses [nodesqlite](https://nodejs.org/api/sqlite.html) under the hood.
+If you encounter a bug in this migration, please [file an issue](https://github.com/apache/pouchdb/issues) and, ideally, modify this post for the benefit of others. Thanks!
diff --git a/docs/static/img/pouchdb_adapters.png b/docs/static/img/pouchdb_adapters.png index 548aa0c1da..02a193e7be 100644 Binary files a/docs/static/img/pouchdb_adapters.png and b/docs/static/img/pouchdb_adapters.png differ diff --git a/docs/static/svg/pouchdb_adapters.svg b/docs/static/svg/pouchdb_adapters.svg index 2cfee97a3b..7b751f4d9a 100644 --- a/docs/static/svg/pouchdb_adapters.svg +++ b/docs/static/svg/pouchdb_adapters.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 377df68636..4cac7631a8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,20 +9,11 @@ "version": "7.0.0-prerelease", "license": "Apache-2.0", "dependencies": { + "@neighbourhoodie/websql": "2.0.4", "double-ended-queue": "2.1.0-0", "fetch-cookie": "2.2.0", - "level": "6.0.1", - "level-codec": "9.0.2", - "level-write-stream": "1.0.0", - "leveldown": "6.1.1", - "levelup": "4.4.0", - "localstorage-down": "0.6.7", - "ltgt": "2.2.1", - "memdown": "1.4.1", "node-fetch": "2.6.9", - "readable-stream": "1.1.14", "spark-md5": "3.0.2", - "through2": "3.0.2", "uuid": "8.3.2", "vuvuzela": "1.0.3" }, @@ -575,6 +566,23 @@ "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "dev": true }, + "node_modules/@neighbourhoodie/websql": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@neighbourhoodie/websql/-/websql-2.0.4.tgz", + "integrity": "sha512-vzVNcdLFq7D+fWm3ypC/pDLoOJOY3TuuMmDdHsy1cv5APVTou9kk7ifrd+u5QLlt9PThEv5BV+TzO03MgDI5Gg==", + "license": "Apache-2.0", + "dependencies": { + "immediate": "^3.2.2", + "noop-fn": "^1.0.0", + "tiny-queue": "^0.2.1" + } + }, + "node_modules/@neighbourhoodie/websql/node_modules/tiny-queue": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tiny-queue/-/tiny-queue-0.2.1.tgz", + "integrity": "sha512-EijGsv7kzd9I9g0ByCl6h42BWNGUZrlCSejfrb3AKeHC33SGbASu1VDf5O3rRiiUOhAC9CHdZxFPbZu0HmR70A==", + 
"license": "Apache 2" + }, "node_modules/@sindresorhus/slugify": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/@sindresorhus/slugify/-/slugify-2.2.1.tgz", @@ -668,21 +676,6 @@ "integrity": "sha512-LEyx4aLEC3x6T0UguF6YILf+ntvmOaWsVfENmIW0E9H09vKlLDGelMjjSm0jkDHALj8A8quZ/HapKNigzwge+Q==", "dev": true }, - "node_modules/abstract-leveldown": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", - "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", - "dependencies": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -882,7 +875,8 @@ "node_modules/argsarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/argsarray/-/argsarray-0.0.1.tgz", - "integrity": "sha512-u96dg2GcAKtpTrBdDoFIM7PjcBA+6rSP0OR94MOReNRyUECL6MtQt5XXmRr4qrftYaef9+l5hcpO5te7sML1Cg==" + "integrity": "sha512-u96dg2GcAKtpTrBdDoFIM7PjcBA+6rSP0OR94MOReNRyUECL6MtQt5XXmRr4qrftYaef9+l5hcpO5te7sML1Cg==", + "dev": true }, "node_modules/array-buffer-byte-length": { "version": "1.0.1", @@ -1286,6 +1280,7 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, "funding": [ { "type": "github", @@ -1978,6 +1973,7 @@ "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, "funding": [ { "type": "github", @@ -2113,14 +2109,6 @@ "integrity": 
"sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", "dev": true }, - "node_modules/catering": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/catering/-/catering-2.1.1.tgz", - "integrity": "sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w==", - "engines": { - "node": ">=6" - } - }, "node_modules/chai": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", @@ -2222,6 +2210,16 @@ "wrap-ansi": "^6.2.0" } }, + "node_modules/clone-buffer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz", + "integrity": "sha512-KLLTJWrvwIP+OPfMn0x2PheDEP20RPUcGXj/ERegTgdmPEZylALQldygiqrPPu8P45uNuPs7ckmReLY6v/iA5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -2598,7 +2596,8 @@ "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true }, "node_modules/corser": { "version": "2.0.1", @@ -2769,11 +2768,6 @@ "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", "dev": true }, - "node_modules/d64": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", - "integrity": "sha512-5eNy3WZziVYnrogqgXhcdEmqcDB2IHurTqLcrgssJsfkMVCUoUaZpK6cJjxxvLV2dUm5SuJMNcYfVGoin9UIRw==" - }, "node_modules/dash-ast": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/dash-ast/-/dash-ast-1.0.0.tgz", @@ -2911,18 +2905,6 @@ "integrity": 
"sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, - "node_modules/deferred-leveldown": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz", - "integrity": "sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw==", - "dependencies": { - "abstract-leveldown": "~6.2.1", - "inherits": "^2.0.3" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/define-data-property": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", @@ -3354,20 +3336,6 @@ "iconv-lite": "^0.6.2" } }, - "node_modules/encoding-down": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-6.3.0.tgz", - "integrity": "sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw==", - "dependencies": { - "abstract-leveldown": "^6.2.1", - "inherits": "^2.0.3", - "level-codec": "^9.0.0", - "level-errors": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/encoding/node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -3389,14 +3357,6 @@ "once": "^1.4.0" } }, - "node_modules/end-stream": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/end-stream/-/end-stream-0.1.0.tgz", - "integrity": "sha512-Brl10T8kYnc75IepKizW6Y9liyW8ikz1B7n/xoHrJxoVSSjoqPn30sb7XVFfQERK4QfUMYRGs9dhWwtt2eu6uA==", - "dependencies": { - "write-stream": "~0.4.3" - } - }, "node_modules/entities": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", @@ -3423,6 +3383,8 @@ "version": "0.1.8", "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "dev": true, + 
"optional": true, "dependencies": { "prr": "~1.0.1" }, @@ -4547,7 +4509,8 @@ "node_modules/functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==" + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", + "dev": true }, "node_modules/functions-have-names": { "version": "1.2.3", @@ -4896,11 +4859,6 @@ "node": ">=8" } }, - "node_modules/has-localstorage": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-localstorage/-/has-localstorage-1.0.1.tgz", - "integrity": "sha512-0M88QdpTOjOAkVqI7sMGt7hTaVnLppr0sjisFagm+Q9gI3Mmkeqpqu/+RZS4a1doIJ90lmL1WofDMbNB7f+F7Q==" - }, "node_modules/has-property-descriptors": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", @@ -5184,15 +5142,6 @@ "integrity": "sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==", "dev": true }, - "node_modules/humble-localstorage": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/humble-localstorage/-/humble-localstorage-1.4.2.tgz", - "integrity": "sha512-INHaIq55BGlrVCSYw11FxRHQAAQVBO5i1t8kREy778DLMVrgaASCujBwZa2e2+n70sUczlBu5Tjie5iYp1JVeQ==", - "dependencies": { - "has-localstorage": "^1.0.1", - "localstorage-memory": "^1.0.1" - } - }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -5209,6 +5158,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, "funding": [ { "type": "github", @@ -5289,7 +5239,8 @@ "node_modules/inherits": { 
"version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true }, "node_modules/inline-source-map": { "version": "0.6.2", @@ -6154,254 +6105,6 @@ "dev": true, "license": "0BSD" }, - "node_modules/level": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/level/-/level-6.0.1.tgz", - "integrity": "sha512-psRSqJZCsC/irNhfHzrVZbmPYXDcEYhA5TVNwr+V92jF44rbf86hqGp8fiT702FyiArScYIlPSBTDUASCVNSpw==", - "dependencies": { - "level-js": "^5.0.0", - "level-packager": "^5.1.0", - "leveldown": "^5.4.0" - }, - "engines": { - "node": ">=8.6.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/level" - } - }, - "node_modules/level-codec": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz", - "integrity": "sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ==", - "dependencies": { - "buffer": "^5.6.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==", - "engines": { - "node": ">=6" - } - }, - "node_modules/level-errors": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", - "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", - "dependencies": { - "errno": "~0.1.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/level-iterator-stream": { - "version": "4.0.2", - "resolved": 
"https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz", - "integrity": "sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q==", - "dependencies": { - "inherits": "^2.0.4", - "readable-stream": "^3.4.0", - "xtend": "^4.0.2" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/level-iterator-stream/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/level-js": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/level-js/-/level-js-5.0.2.tgz", - "integrity": "sha512-SnBIDo2pdO5VXh02ZmtAyPP6/+6YTJg2ibLtl9C34pWvmtMEmRTWpra+qO/hifkUtBTOtfx6S9vLDjBsBK4gRg==", - "dependencies": { - "abstract-leveldown": "~6.2.3", - "buffer": "^5.5.0", - "inherits": "^2.0.3", - "ltgt": "^2.1.2" - } - }, - "node_modules/level-packager": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-5.1.1.tgz", - "integrity": "sha512-HMwMaQPlTC1IlcwT3+swhqf/NUO+ZhXVz6TY1zZIIZlIR0YSn8GtAAWmIvKjNY16ZkEg/JcpAuQskxsXqC0yOQ==", - "dependencies": { - "encoding-down": "^6.3.0", - "levelup": "^4.3.2" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "dependencies": { - "xtend": "^4.0.2" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/level-write-stream": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/level-write-stream/-/level-write-stream-1.0.0.tgz", - "integrity": "sha512-bBNKOEOMl8msO+uIM9YX/gUO6ckokZ/4pCwTm/lwvs46x6Xs8Zy0sn3Vh37eDqse4mhy4fOMIb/JsSM2nyQFtw==", - "dependencies": { - "end-stream": "~0.1.0" - } - }, - "node_modules/level/node_modules/leveldown": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/leveldown/-/leveldown-5.6.0.tgz", - "integrity": "sha512-iB8O/7Db9lPaITU1aA2txU/cBEXAt4vWwKQRrrWuS6XDgbP4QZGj9BL2aNbwb002atoQ/lIotJkfyzz+ygQnUQ==", - "hasInstallScript": true, - "dependencies": { - "abstract-leveldown": "~6.2.1", - "napi-macros": "~2.0.0", - "node-gyp-build": "~4.1.0" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/leveldown": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/leveldown/-/leveldown-6.1.1.tgz", - "integrity": "sha512-88c+E+Eizn4CkQOBHwqlCJaTNEjGpaEIikn1S+cINc5E9HEvJ77bqY4JY/HxT5u0caWqsc3P3DcFIKBI1vHt+A==", - "hasInstallScript": true, - "dependencies": { - "abstract-leveldown": "^7.2.0", - "napi-macros": "~2.0.0", - "node-gyp-build": "^4.3.0" - }, - "engines": { - "node": ">=10.12.0" - } - }, - "node_modules/leveldown/node_modules/abstract-leveldown": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz", - "integrity": "sha512-DnhQwcFEaYsvYDnACLZhMmCWd3rkOeEvglpa4q5i/5Jlm3UIsWaxVzuXvDLFCSCWRO3yy2/+V/G7FusFgejnfQ==", - "dependencies": { - "buffer": "^6.0.3", - "catering": "^2.0.0", - "is-buffer": "^2.0.5", - "level-concat-iterator": "^3.0.0", - "level-supports": "^2.0.1", - "queue-microtask": "^1.2.3" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/leveldown/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, 
- { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/leveldown/node_modules/is-buffer": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", - "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "engines": { - "node": ">=4" - } - }, - "node_modules/leveldown/node_modules/level-concat-iterator": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-3.1.0.tgz", - "integrity": "sha512-BWRCMHBxbIqPxJ8vHOvKUsaO0v1sLYZtjN3K2iZJsRBYtp+ONsY6Jfi6hy9K3+zolgQRryhIn2NRZjZnWJ9NmQ==", - "dependencies": { - "catering": "^2.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/leveldown/node_modules/level-supports": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-2.1.0.tgz", - "integrity": "sha512-E486g1NCjW5cF78KGPrMDRBYzPuueMZ6VBXHT6gC7A8UYWGiM14fGgp+s/L1oFfDWSPV/+SFkYCmZ0SiESkRKA==", - "engines": { - "node": ">=10" - } - }, - "node_modules/leveldown/node_modules/node-gyp-build": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.0.tgz", - "integrity": "sha512-u6fs2AEUljNho3EYTJNBfImO5QTo/J/1Etd+NVdCj7qWKUSN/bSLkZwhDv7I+w/MSC6qJ4cknepkAYykDdK8og==", - "bin": { - "node-gyp-build": "bin.js", - "node-gyp-build-optional": "optional.js", - "node-gyp-build-test": "build-test.js" - } - }, - "node_modules/levelup": { - "version": "4.4.0", - "resolved": 
"https://registry.npmjs.org/levelup/-/levelup-4.4.0.tgz", - "integrity": "sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ==", - "dependencies": { - "deferred-leveldown": "~5.3.0", - "level-errors": "~2.0.0", - "level-iterator-stream": "~4.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", @@ -6478,46 +6181,6 @@ "dev": true, "license": "MIT" }, - "node_modules/localstorage-down": { - "version": "0.6.7", - "resolved": "https://registry.npmjs.org/localstorage-down/-/localstorage-down-0.6.7.tgz", - "integrity": "sha512-FICPps7r0bhe4J4723TVjxkkP/9SgIn95zWC8M6isvibErHHHyrm1gryMXpqyjSd/PaOcdKICvVaJPWW3OWcZQ==", - "dependencies": { - "abstract-leveldown": "0.12.3", - "argsarray": "0.0.1", - "buffer-from": "^0.1.1", - "d64": "^1.0.0", - "humble-localstorage": "^1.4.2", - "inherits": "^2.0.1", - "tiny-queue": "0.2.0" - } - }, - "node_modules/localstorage-down/node_modules/abstract-leveldown": { - "version": "0.12.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-0.12.3.tgz", - "integrity": "sha512-2XjIA9DFg1Cj2mVm/SmeJ2NIEt/6PRThyHk13ZyVyiZBSYwbEbGMcyt8uEFDlQByYwtBonFOPC0VpxjKVUqJXQ==", - "dependencies": { - "xtend": "~3.0.0" - } - }, - "node_modules/localstorage-down/node_modules/buffer-from": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-0.1.2.tgz", - "integrity": "sha512-RiWIenusJsmI2KcvqQABB83tLxCByE3upSP8QU3rJDMVFGPWLvPQJt/O1Su9moRWeH7d+Q2HYb68f6+v+tw2vg==" - }, - "node_modules/localstorage-down/node_modules/xtend": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-3.0.0.tgz", - "integrity": "sha512-sp/sT9OALMjRW1fKDlPeuSZlDQpkqReA0pyJukniWbTGoEKefHxhGJynE3PNhUMlcM8qWIjPwecwCw4LArS5Eg==", - "engines": { - "node": ">=0.4" - } - }, - 
"node_modules/localstorage-memory": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/localstorage-memory/-/localstorage-memory-1.0.3.tgz", - "integrity": "sha512-t9P8WB6DcVttbw/W4PIE8HOqum8Qlvx5SjR6oInwR9Uia0EEmyUeBh7S+weKByW+l/f45Bj4L/dgZikGFDM6ng==" - }, "node_modules/locate-path": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", @@ -6565,11 +6228,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ltgt": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ltgt/-/ltgt-2.2.1.tgz", - "integrity": "sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA==" - }, "node_modules/luxon": { "version": "3.7.2", "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz", @@ -6702,32 +6360,6 @@ "integrity": "sha512-yrVT3CsP0Lqm1xuKjGHdRVt4va3rQu3UCidWboXlwnsflxRUHTKQIatvSQML8n32jH6XzY2jrdClYMUOkdrBHQ==", "dev": true }, - "node_modules/memdown": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/memdown/-/memdown-1.4.1.tgz", - "integrity": "sha512-iVrGHZB8i4OQfM155xx8akvG9FIj+ht14DX5CQkCTG4EHzZ3d3sgckIf/Lm9ivZalEsFuEVnWv2B2WZvbrro2w==", - "dependencies": { - "abstract-leveldown": "~2.7.1", - "functional-red-black-tree": "^1.0.1", - "immediate": "^3.2.3", - "inherits": "~2.0.1", - "ltgt": "~2.2.0", - "safe-buffer": "~5.1.1" - } - }, - "node_modules/memdown/node_modules/abstract-leveldown": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-2.7.2.tgz", - "integrity": "sha512-+OVvxH2rHVEhWLdbudP6p0+dNMXu8JA1CbhP19T8paTYAcX7oJ4OVjT+ZUVpv7mITxXHqDMej+GdqXBmXkw09w==", - "dependencies": { - "xtend": "~4.0.0" - } - }, - "node_modules/memdown/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": 
"sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, "node_modules/merge-descriptors": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", @@ -7227,11 +6859,6 @@ "node": ">= 0.6" } }, - "node_modules/napi-macros": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/napi-macros/-/napi-macros-2.0.0.tgz", - "integrity": "sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg==" - }, "node_modules/native-request": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/native-request/-/native-request-1.1.2.tgz", @@ -7302,16 +6929,6 @@ } } }, - "node_modules/node-gyp-build": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.1.1.tgz", - "integrity": "sha512-dSq1xmcPDKPZ2EED2S6zw/b9NKsqzXRE6dVr8TVQnI3FJOTteUMuqF3Qqs6LZg+mLGYJWqQzMbIjMtJqTv87nQ==", - "bin": { - "node-gyp-build": "bin.js", - "node-gyp-build-optional": "optional.js", - "node-gyp-build-test": "build-test.js" - } - }, "node_modules/node-retrieve-globals": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/node-retrieve-globals/-/node-retrieve-globals-6.0.1.tgz", @@ -7374,6 +6991,12 @@ "node": "*" } }, + "node_modules/noop-fn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/noop-fn/-/noop-fn-1.0.0.tgz", + "integrity": "sha512-pQ8vODlgXt2e7A3mIbFDlizkr46r75V+BJxVAyat8Jl7YmI513gG5cfyRL0FedKraoZ+VAouI1h4/IWpus5pcQ==", + "license": "MIT" + }, "node_modules/nopt": { "version": "3.0.6", "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", @@ -8855,15 +8478,6 @@ "uuid": "8.3.2" } }, - "node_modules/pouchdb-utils/node_modules/clone-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz", - "integrity": "sha512-KLLTJWrvwIP+OPfMn0x2PheDEP20RPUcGXj/ERegTgdmPEZylALQldygiqrPPu8P45uNuPs7ckmReLY6v/iA5g==", - 
"dev": true, - "engines": { - "node": ">= 0.10" - } - }, "node_modules/pouchdb-validation": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/pouchdb-validation/-/pouchdb-validation-4.2.0.tgz", @@ -8963,7 +8577,8 @@ "node_modules/prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", - "integrity": "sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==" + "integrity": "sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==", + "dev": true }, "node_modules/ps-tree": { "version": "0.0.3", @@ -9065,25 +8680,6 @@ "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/random-bytes": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", @@ -9194,27 +8790,6 @@ "safe-buffer": "~5.1.0" } }, - "node_modules/readable-stream": { - "version": "1.1.14", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - } - }, - "node_modules/readable-stream/node_modules/isarray": { - "version": 
"0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" - }, - "node_modules/readable-stream/node_modules/string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" - }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -9640,6 +9215,7 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, "funding": [ { "type": "github", @@ -10388,6 +9964,7 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, "dependencies": { "safe-buffer": "~5.2.0" } @@ -10669,28 +10246,6 @@ "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", "dev": true }, - "node_modules/through2": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", - "integrity": "sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", - "dependencies": { - "inherits": "^2.0.4", - "readable-stream": "2 || 3" - } - }, - "node_modules/through2/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - 
"string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/throw-max-listeners-error": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/throw-max-listeners-error/-/throw-max-listeners-error-1.0.1.tgz", @@ -10718,7 +10273,8 @@ "node_modules/tiny-queue": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/tiny-queue/-/tiny-queue-0.2.0.tgz", - "integrity": "sha512-ucfrvjzfbtc+xqmn95DEUtGcDHJHQgZ9IR0mizPOZBkY45reZDCJjafUGVJOGJassjn0MavTyWOCQcG+agpLxw==" + "integrity": "sha512-ucfrvjzfbtc+xqmn95DEUtGcDHJHQgZ9IR0mizPOZBkY45reZDCJjafUGVJOGJassjn0MavTyWOCQcG+agpLxw==", + "dev": true }, "node_modules/tinyglobby": { "version": "0.2.15", @@ -11287,7 +10843,8 @@ "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true }, "node_modules/util/node_modules/inherits": { "version": "2.0.3", @@ -11491,19 +11048,6 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "dev": true }, - "node_modules/write-stream": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/write-stream/-/write-stream-0.4.3.tgz", - "integrity": "sha512-IJrvkhbAnj89W/GAVdVgbnPiVw5Ntg/B4tc/MUCIEwj/g6JIww1DWJyB/yBMT3yw2/TkT6IUZ0+IYef3flEw8A==", - "dependencies": { - "readable-stream": "~0.0.2" - } - }, - "node_modules/write-stream/node_modules/readable-stream": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-0.0.4.tgz", - "integrity": "sha512-azrivNydKRYt7zwLV5wWUK7YzKTWs3q87xSmY6DlHapPrCvaT6ZrukvM5erV+yCSSPmZT8zkSdttOHQpWWm9zw==" - }, "node_modules/ws": { "version": "8.19.0", "resolved": 
"https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", @@ -11563,6 +11107,7 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, "engines": { "node": ">=0.4" } diff --git a/package.json b/package.json index fe3a9dbce3..dbcac6e44c 100644 --- a/package.json +++ b/package.json @@ -38,20 +38,11 @@ "dependencies": { "double-ended-queue": "2.1.0-0", "fetch-cookie": "2.2.0", - "level": "6.0.1", - "level-codec": "9.0.2", - "level-write-stream": "1.0.0", - "leveldown": "6.1.1", - "levelup": "4.4.0", - "localstorage-down": "0.6.7", - "ltgt": "2.2.1", - "memdown": "1.4.1", "node-fetch": "2.6.9", - "readable-stream": "1.1.14", "spark-md5": "3.0.2", - "through2": "3.0.2", "uuid": "8.3.2", - "vuvuzela": "1.0.3" + "vuvuzela": "1.0.3", + "@neighbourhoodie/websql": "2.0.4" }, "devDependencies": { "@11ty/eleventy": "3.1.2", diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/README.md b/packages/node_modules/pouchdb-adapter-leveldb-core/README.md deleted file mode 100644 index afd2cde754..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/README.md +++ /dev/null @@ -1,43 +0,0 @@ -pouchdb-adapter-leveldb-core ![semver non-compliant](https://img.shields.io/badge/semver-non--compliant-red.svg) -====== - -Underlying adapter code for LevelDOWN-based PouchDB adapters (such as `pouchdb-adapter-leveldb`). Most likely you should not use this package unless you are trying to build your own PouchDB adapter based on a *DOWN database. - -### Usage - -```bash -npm install --save-exact pouchdb-adapter-leveldb-core -``` - -### Overview - -`pouchdb-adapter-leveldb-core` is designed in such a way that you can substitute -[any LevelDOWN-based implementation](https://github.com/rvagg/node-levelup/wiki/Modules#storage-back-ends) and therefore -use PouchDB over RiakDOWN, RedisDOWN, etc. 
- -In practice, though, only a handful of LevelDOWN-based adapters are supported by PouchDB and guaranteed to run with 100% -fidelity. Furthermore, adapters like RiakDOWN, MySQLDown, RedisDOWN, etc. rarely provide the feature that fans of those -databases hope to get out of them, i.e. PouchDB's magical syncing capabilities on top of their favorite database. In truth, -what those adapters do is reduce the underlying database to a LevelDB-like key-value store and require PouchDB to implement -its own revision-handling on top of them. In other words, they tend to be unperformant and require you to exclusively use -PouchDB's API to access them. - -Only certain adapters (e.g. in-memory, based on [MemDOWN](http://github.com/level/memdown)) are officially supported by the -PouchDB project; you are free to experiment with other datastores, but be forewarned of the above caveats. To see how to -build a custom LevelDOWN-based database, see the source code for `pouchdb-adapter-memory` or `pouchdb-adapter-localstorage`. - -### Details - -For full API documentation and guides on PouchDB, see [PouchDB.com](http://pouchdb.com/). For details on PouchDB sub-packages, see the [Custom Builds documentation](http://pouchdb.com/custom.html). - -### Warning: semver-free zone! - -This package is conceptually an internal API used by PouchDB or its plugins. It does not follow semantic versioning (semver), and rather its version is pegged to PouchDB's. Use exact versions when installing, e.g. with `--save-exact`. - -### Source - -PouchDB and its sub-packages are distributed as a [monorepo](https://github.com/babel/babel/blob/master/doc/design/monorepo.md). - -For a full list of packages, see [the GitHub source](https://github.com/pouchdb/pouchdb/tree/master/packages). 
- - diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/package-lock.json b/packages/node_modules/pouchdb-adapter-leveldb-core/package-lock.json deleted file mode 100644 index 2fff6a3168..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/package-lock.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "pouchdb-adapter-leveldb-core", - "version": "7.0.0-prerelease", - "lockfileVersion": 1 -} diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/package.json b/packages/node_modules/pouchdb-adapter-leveldb-core/package.json deleted file mode 100644 index d505d06186..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/package.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "pouchdb-adapter-leveldb-core", - "version": "7.0.0-prerelease", - "description": "Core PouchDB adapter code for LevelDOWN-based adapters", - "main": "./lib/index.js", - "keywords": [], - "author": "Dale Harvey ", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "https://github.com/pouchdb/pouchdb.git", - "directory": "packages/node_modules/pouchdb-adapter-leveldb-core" - }, - "module": "./src/index.js", - "browser": { - "./lib/index.js": "./lib/index-browser.js", - "./src/createEmptyBlobOrBuffer.js": "./src/createEmptyBlobOrBuffer-browser.js", - "./src/prepareAttachmentForStorage.js": "./src/prepareAttachmentForStorage-browser.js", - "./src/readAsBlobOrBuffer.js": "./src/readAsBlobOrBuffer-browser.js" - } -} diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/createEmptyBlobOrBuffer-browser.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/createEmptyBlobOrBuffer-browser.js deleted file mode 100644 index 213a295334..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/createEmptyBlobOrBuffer-browser.js +++ /dev/null @@ -1,7 +0,0 @@ -import { blob as createBlob } from 'pouchdb-binary-utils'; - -function createEmptyBlobOrBuffer(type) { - return createBlob([''], {type}); -} - -export 
default createEmptyBlobOrBuffer; diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/createEmptyBlobOrBuffer.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/createEmptyBlobOrBuffer.js deleted file mode 100644 index ee07335ce9..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/createEmptyBlobOrBuffer.js +++ /dev/null @@ -1,7 +0,0 @@ -import { typedBuffer } from 'pouchdb-binary-utils'; - -function createEmptyBlobOrBuffer(type) { - return typedBuffer('', 'binary', type); -} - -export default createEmptyBlobOrBuffer; diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/index.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/index.js deleted file mode 100644 index 6033cdf088..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/index.js +++ /dev/null @@ -1,1514 +0,0 @@ -import levelup from 'levelup'; -import sublevel from 'sublevel-pouchdb'; -import { obj as through } from 'through2'; -import Deque from 'double-ended-queue'; -import PouchDB from 'pouchdb-core'; -import { - clone, - changesHandler as Changes, - filterChange, - functionName, - uuid, - nextTick -} from 'pouchdb-utils'; -import { - allDocsKeysQuery, - isDeleted, - isLocalId, - parseDoc, - processDocs -} from 'pouchdb-adapter-utils'; -import { - winningRev as calculateWinningRev, - traverseRevTree, - compactTree, - collectConflicts, - latest as getLatest -} from 'pouchdb-merge'; -import { - safeJsonParse, - safeJsonStringify -} from 'pouchdb-json'; - -import { - binaryMd5 -} from 'pouchdb-md5'; - -import { - atob, - binaryStringToBlobOrBuffer as binStringToBluffer -} from 'pouchdb-binary-utils'; - -import readAsBluffer from './readAsBlobOrBuffer'; -import prepareAttachmentForStorage from './prepareAttachmentForStorage'; -import createEmptyBluffer from './createEmptyBlobOrBuffer'; - -import LevelTransaction from './transaction'; - -import { - MISSING_DOC, - REV_CONFLICT, - NOT_OPEN, - BAD_ARG, - MISSING_STUB, - 
createError -} from 'pouchdb-errors'; - -var DOC_STORE = 'document-store'; -var BY_SEQ_STORE = 'by-sequence'; -var ATTACHMENT_STORE = 'attach-store'; -var BINARY_STORE = 'attach-binary-store'; -var LOCAL_STORE = 'local-store'; -var META_STORE = 'meta-store'; - -// leveldb barks if we try to open a db multiple times -// so we cache opened connections here for initstore() -var dbStores = new Map(); - -// store the value of update_seq in the by-sequence store the key name will -// never conflict, since the keys in the by-sequence store are integers -var UPDATE_SEQ_KEY = '_local_last_update_seq'; -var DOC_COUNT_KEY = '_local_doc_count'; -var UUID_KEY = '_local_uuid'; - -var MD5_PREFIX = 'md5-'; - -var safeJsonEncoding = { - encode: safeJsonStringify, - decode: safeJsonParse, - buffer: false, - type: 'cheap-json' -}; - -var levelChanges = new Changes(); - -// winningRev and deleted are performance-killers, but -// in newer versions of PouchDB, they are cached on the metadata -function getWinningRev(metadata) { - return 'winningRev' in metadata ? - metadata.winningRev : calculateWinningRev(metadata); -} - -function getIsDeleted(metadata, winningRev) { - return 'deleted' in metadata ? 
- metadata.deleted : isDeleted(metadata, winningRev); -} - -function fetchAttachment(att, stores, opts) { - var type = att.content_type; - return new Promise(function (resolve, reject) { - stores.binaryStore.get(att.digest, function (err, buffer) { - var data; - if (err) { - /* istanbul ignore if */ - if (err.name !== 'NotFoundError') { - return reject(err); - } else { - // empty - if (!opts.binary) { - data = ''; - } else { - data = binStringToBluffer('', type); - } - } - } else { // non-empty - if (opts.binary) { - data = readAsBluffer(buffer, type); - } else { - data = buffer.toString('base64'); - } - } - delete att.stub; - delete att.length; - att.data = data; - resolve(); - }); - }); -} - -function fetchAttachments(results, stores, opts) { - var atts = []; - results.forEach(function (row) { - if (!(row.doc && row.doc._attachments)) { - return; - } - var attNames = Object.keys(row.doc._attachments); - attNames.forEach(function (attName) { - var att = row.doc._attachments[attName]; - if (!('data' in att)) { - atts.push(att); - } - }); - }); - - return Promise.all(atts.map(function (att) { - return fetchAttachment(att, stores, opts); - })); -} - -function LevelPouch(opts, callback) { - opts = clone(opts); - var api = this; - var instanceId; - var stores = {}; - var revLimit = opts.revs_limit; - var db; - var name = opts.name; - // TODO: this is undocumented and unused probably - /* istanbul ignore else */ - if (typeof opts.createIfMissing === 'undefined') { - opts.createIfMissing = true; - } - - var leveldown = opts.db; - - var dbStore; - var leveldownName = functionName(leveldown); - if (dbStores.has(leveldownName)) { - dbStore = dbStores.get(leveldownName); - } else { - dbStore = new Map(); - dbStores.set(leveldownName, dbStore); - } - if (dbStore.has(name)) { - db = dbStore.get(name); - afterDBCreated(); - } else { - dbStore.set(name, sublevel(levelup(leveldown(name), opts, function (err) { - /* istanbul ignore if */ - if (err) { - dbStore.delete(name); - 
return callback(err); - } - db = dbStore.get(name); - db._docCount = -1; - db._queue = new Deque(); - /* istanbul ignore else */ - if (typeof opts.migrate === 'object') { // migration for leveldown - opts.migrate.doMigrationOne(name, db, afterDBCreated); - } else { - afterDBCreated(); - } - }))); - } - - function afterDBCreated() { - stores.docStore = db.sublevel(DOC_STORE, {valueEncoding: safeJsonEncoding}); - stores.bySeqStore = db.sublevel(BY_SEQ_STORE, {valueEncoding: 'json'}); - stores.attachmentStore = - db.sublevel(ATTACHMENT_STORE, {valueEncoding: 'json'}); - stores.binaryStore = db.sublevel(BINARY_STORE, {valueEncoding: 'binary'}); - stores.localStore = db.sublevel(LOCAL_STORE, {valueEncoding: 'json'}); - stores.metaStore = db.sublevel(META_STORE, {valueEncoding: 'json'}); - /* istanbul ignore else */ - if (typeof opts.migrate === 'object') { // migration for leveldown - opts.migrate.doMigrationTwo(db, stores, afterLastMigration); - } else { - afterLastMigration(); - } - } - - function afterLastMigration() { - stores.metaStore.get(UPDATE_SEQ_KEY, function (err, value) { - if (typeof db._updateSeq === 'undefined') { - db._updateSeq = value || 0; - } - stores.metaStore.get(DOC_COUNT_KEY, function (err, value) { - db._docCount = !err ? value : 0; - stores.metaStore.get(UUID_KEY, function (err, value) { - instanceId = !err ? 
value : uuid(); - stores.metaStore.put(UUID_KEY, instanceId, function () { - nextTick(function () { - callback(null, api); - }); - }); - }); - }); - }); - } - - function countDocs(callback) { - /* istanbul ignore if */ - if (db.isClosed()) { - return callback(new Error('database is closed')); - } - return callback(null, db._docCount); // use cached value - } - - api._remote = false; - /* istanbul ignore next */ - api.type = function () { - return 'leveldb'; - }; - - api._id = function (callback) { - callback(null, instanceId); - }; - - api._info = function (callback) { - var res = { - doc_count: db._docCount, - update_seq: db._updateSeq, - backend_adapter: functionName(leveldown) - }; - return nextTick(function () { - callback(null, res); - }); - }; - - function tryCode(fun, args) { - try { - fun.apply(null, args); - } catch (err) { - args[args.length - 1](err); - } - } - - function executeNext() { - var firstTask = db._queue.peekFront(); - - if (firstTask.type === 'read') { - runReadOperation(firstTask); - } else { // write, only do one at a time - runWriteOperation(firstTask); - } - } - - function runReadOperation(firstTask) { - // do multiple reads at once simultaneously, because it's safe - - var readTasks = [firstTask]; - var i = 1; - var nextTask = db._queue.get(i); - while (typeof nextTask !== 'undefined' && nextTask.type === 'read') { - readTasks.push(nextTask); - i++; - nextTask = db._queue.get(i); - } - - var numDone = 0; - - readTasks.forEach(function (readTask) { - var args = readTask.args; - var callback = args[args.length - 1]; - args[args.length - 1] = function (...cbArgs) { - callback.apply(null, cbArgs); - if (++numDone === readTasks.length) { - nextTick(function () { - // all read tasks have finished - readTasks.forEach(function () { - db._queue.shift(); - }); - if (db._queue.length) { - executeNext(); - } - }); - } - }; - tryCode(readTask.fun, args); - }); - } - - function runWriteOperation(firstTask) { - var args = firstTask.args; - var callback 
= args[args.length - 1]; - args[args.length - 1] = function (...cbArgs) { - callback.apply(null, cbArgs); - nextTick(function () { - db._queue.shift(); - if (db._queue.length) { - executeNext(); - } - }); - }; - tryCode(firstTask.fun, args); - } - - // all read/write operations to the database are done in a queue, - // similar to how websql/idb works. this avoids problems such - // as e.g. compaction needing to have a lock on the database while - // it updates stuff. in the future we can revisit this. - function writeLock(fun) { - return function (...args) { - db._queue.push({ - fun, - args, - type: 'write' - }); - - if (db._queue.length === 1) { - nextTick(executeNext); - } - }; - } - - // same as the writelock, but multiple can run at once - function readLock(fun) { - return function (...args) { - db._queue.push({ - fun, - args, - type: 'read' - }); - - if (db._queue.length === 1) { - nextTick(executeNext); - } - }; - } - - function formatSeq(n) { - return ('0000000000000000' + n).slice(-16); - } - - function parseSeq(s) { - return parseInt(s, 10); - } - - api._get = readLock(function (id, opts, callback) { - opts = clone(opts); - - stores.docStore.get(id, function (err, metadata) { - - if (err || !metadata) { - return callback(createError(MISSING_DOC, 'missing')); - } - - var rev; - if (!opts.rev) { - rev = getWinningRev(metadata); - var deleted = getIsDeleted(metadata, rev); - if (deleted) { - return callback(createError(MISSING_DOC, "deleted")); - } - } else { - rev = opts.latest ? 
getLatest(opts.rev, metadata) : opts.rev; - } - - var seq = metadata.rev_map[rev]; - - stores.bySeqStore.get(formatSeq(seq), function (err, doc) { - if (!doc) { - return callback(createError(MISSING_DOC)); - } - /* istanbul ignore if */ - if ('_id' in doc && doc._id !== metadata.id) { - // this failing implies something very wrong - return callback(new Error('wrong doc returned')); - } - doc._id = metadata.id; - if ('_rev' in doc) { - /* istanbul ignore if */ - if (doc._rev !== rev) { - // this failing implies something very wrong - return callback(new Error('wrong doc returned')); - } - } else { - // we didn't always store this - doc._rev = rev; - } - return callback(null, {doc, metadata}); - }); - }); - }); - - // not technically part of the spec, but if putAttachment has its own - // method... - api._getAttachment = function (docId, attachId, attachment, opts, callback) { - var digest = attachment.digest; - var type = attachment.content_type; - - stores.binaryStore.get(digest, function (err, attach) { - if (err) { - /* istanbul ignore if */ - if (err.name !== 'NotFoundError') { - return callback(err); - } - // Empty attachment - return callback(null, opts.binary ? 
createEmptyBluffer(type) : ''); - } - - if (opts.binary) { - callback(null, readAsBluffer(attach, type)); - } else { - callback(null, attach.toString('base64')); - } - }); - }; - - api._bulkDocs = writeLock(function (req, opts, callback) { - var newEdits = opts.new_edits; - var results = new Array(req.docs.length); - var fetchedDocs = new Map(); - var stemmedRevs = new Map(); - - var txn = new LevelTransaction(); - var docCountDelta = 0; - var newUpdateSeq = db._updateSeq; - - // parse the docs and give each a sequence number - var userDocs = req.docs; - var docInfos = userDocs.map(function (doc) { - if (doc._id && isLocalId(doc._id)) { - return doc; - } - var newDoc = parseDoc(doc, newEdits, api.__opts); - - if (newDoc.metadata && !newDoc.metadata.rev_map) { - newDoc.metadata.rev_map = {}; - } - - return newDoc; - }); - var infoErrors = docInfos.filter(function (doc) { - return doc.error; - }); - - if (infoErrors.length) { - return callback(infoErrors[0]); - } - - // verify any stub attachments as a precondition test - - function verifyAttachment(digest, callback) { - txn.get(stores.attachmentStore, digest, function (levelErr) { - if (levelErr) { - var err = createError(MISSING_STUB, - 'unknown stub attachment with digest ' + - digest); - callback(err); - } else { - callback(); - } - }); - } - - function verifyAttachments(finish) { - var digests = []; - userDocs.forEach(function (doc) { - if (doc && doc._attachments) { - Object.keys(doc._attachments).forEach(function (filename) { - var att = doc._attachments[filename]; - if (att.stub) { - digests.push(att.digest); - } - }); - } - }); - if (!digests.length) { - return finish(); - } - var numDone = 0; - var err; - - digests.forEach(function (digest) { - verifyAttachment(digest, function (attErr) { - if (attErr && !err) { - err = attErr; - } - - if (++numDone === digests.length) { - finish(err); - } - }); - }); - } - - function fetchExistingDocs(finish) { - var numDone = 0; - var overallErr; - function checkDone() { 
- if (++numDone === userDocs.length) { - return finish(overallErr); - } - } - - userDocs.forEach(function (doc) { - if (doc._id && isLocalId(doc._id)) { - // skip local docs - return checkDone(); - } - txn.get(stores.docStore, doc._id, function (err, info) { - if (err) { - /* istanbul ignore if */ - if (err.name !== 'NotFoundError') { - overallErr = err; - } - } else { - fetchedDocs.set(doc._id, info); - } - checkDone(); - }); - }); - } - - function compact(revsMap, callback) { - var promise = Promise.resolve(); - revsMap.forEach(function (revs, docId) { - // TODO: parallelize, for now need to be sequential to - // pass orphaned attachment tests - promise = promise.then(function () { - return new Promise(function (resolve, reject) { - api._doCompactionNoLock(docId, revs, {ctx: txn}, function (err) { - /* istanbul ignore if */ - if (err) { - return reject(err); - } - resolve(); - }); - }); - }); - }); - - promise.then(function () { - callback(); - }, callback); - } - - function autoCompact(callback) { - var revsMap = new Map(); - fetchedDocs.forEach(function (metadata, docId) { - revsMap.set(docId, compactTree(metadata)); - }); - compact(revsMap, callback); - } - - function finish() { - compact(stemmedRevs, function (error) { - /* istanbul ignore if */ - if (error) { - complete(error); - } - if (api.auto_compaction) { - return autoCompact(complete); - } - complete(); - }); - } - - function writeDoc(docInfo, winningRev, winningRevIsDeleted, newRevIsDeleted, - isUpdate, delta, resultsIdx, callback2) { - docCountDelta += delta; - - var err = null; - var recv = 0; - - docInfo.metadata.winningRev = winningRev; - docInfo.metadata.deleted = winningRevIsDeleted; - - docInfo.data._id = docInfo.metadata.id; - docInfo.data._rev = docInfo.metadata.rev; - - if (newRevIsDeleted) { - docInfo.data._deleted = true; - } - - if (docInfo.stemmedRevs.length) { - stemmedRevs.set(docInfo.metadata.id, docInfo.stemmedRevs); - } - - var attachments = docInfo.data._attachments ? 
- Object.keys(docInfo.data._attachments) : - []; - - function attachmentSaved(attachmentErr) { - recv++; - if (!err) { - /* istanbul ignore if */ - if (attachmentErr) { - err = attachmentErr; - callback2(err); - } else if (recv === attachments.length) { - finish(); - } - } - } - - function onMD5Load(doc, key, data, attachmentSaved) { - return function (result) { - saveAttachment(doc, MD5_PREFIX + result, key, data, attachmentSaved); - }; - } - - function doMD5(doc, key, attachmentSaved) { - return function (data) { - binaryMd5(data, onMD5Load(doc, key, data, attachmentSaved)); - }; - } - - for (var i = 0; i < attachments.length; i++) { - var key = attachments[i]; - var att = docInfo.data._attachments[key]; - - if (att.stub) { - // still need to update the refs mapping - var id = docInfo.data._id; - var rev = docInfo.data._rev; - saveAttachmentRefs(id, rev, att.digest, attachmentSaved); - continue; - } - var data; - if (typeof att.data === 'string') { - // input is assumed to be a base64 string - try { - data = atob(att.data); - } catch (e) { - callback(createError(BAD_ARG, - 'Attachment is not a valid base64 string')); - return; - } - doMD5(docInfo, key, attachmentSaved)(data); - } else { - prepareAttachmentForStorage(att.data, - doMD5(docInfo, key, attachmentSaved)); - } - } - - function finish() { - var seq = docInfo.metadata.rev_map[docInfo.metadata.rev]; - /* istanbul ignore if */ - if (seq) { - // check that there aren't any existing revisions with the same - // revision id, else we shouldn't do anything - return callback2(); - } - seq = ++newUpdateSeq; - docInfo.metadata.rev_map[docInfo.metadata.rev] = - docInfo.metadata.seq = seq; - var seqKey = formatSeq(seq); - var batch = [{ - key: seqKey, - value: docInfo.data, - prefix: stores.bySeqStore, - type: 'put' - }, { - key: docInfo.metadata.id, - value: docInfo.metadata, - prefix: stores.docStore, - type: 'put' - }]; - txn.batch(batch); - results[resultsIdx] = { - ok: true, - id: docInfo.metadata.id, - rev: 
docInfo.metadata.rev - }; - fetchedDocs.set(docInfo.metadata.id, docInfo.metadata); - callback2(); - } - - if (!attachments.length) { - finish(); - } - } - - // attachments are queued per-digest, otherwise the refs could be - // overwritten by concurrent writes in the same bulkDocs session - var attachmentQueues = {}; - - function saveAttachmentRefs(id, rev, digest, callback) { - - function fetchAtt() { - return new Promise(function (resolve, reject) { - txn.get(stores.attachmentStore, digest, function (err, oldAtt) { - /* istanbul ignore if */ - if (err && err.name !== 'NotFoundError') { - return reject(err); - } - resolve(oldAtt); - }); - }); - } - - function saveAtt(oldAtt) { - var ref = [id, rev].join('@'); - var newAtt = {}; - - if (oldAtt) { - if (oldAtt.refs) { - // only update references if this attachment already has them - // since we cannot migrate old style attachments here without - // doing a full db scan for references - newAtt.refs = oldAtt.refs; - newAtt.refs[ref] = true; - } - } else { - newAtt.refs = {}; - newAtt.refs[ref] = true; - } - - return new Promise(function (resolve) { - txn.batch([{ - type: 'put', - prefix: stores.attachmentStore, - key: digest, - value: newAtt - }]); - resolve(!oldAtt); - }); - } - - // put attachments in a per-digest queue, to avoid two docs with the same - // attachment overwriting each other - var queue = attachmentQueues[digest] || Promise.resolve(); - attachmentQueues[digest] = queue.then(function () { - return fetchAtt().then(saveAtt).then(function (isNewAttachment) { - callback(null, isNewAttachment); - }, callback); - }); - } - - function saveAttachment(docInfo, digest, key, data, callback) { - var att = docInfo.data._attachments[key]; - delete att.data; - att.digest = digest; - att.length = data.length; - var id = docInfo.metadata.id; - var rev = docInfo.metadata.rev; - att.revpos = parseInt(rev, 10); - - saveAttachmentRefs(id, rev, digest, function (err, isNewAttachment) { - /* istanbul ignore if */ - if 
(err) { - return callback(err); - } - // do not try to store empty attachments - if (data.length === 0) { - return callback(err); - } - if (!isNewAttachment) { - // small optimization - don't bother writing it again - return callback(err); - } - txn.batch([{ - type: 'put', - prefix: stores.binaryStore, - key: digest, - value: Buffer.from(data, 'binary') - }]); - callback(); - }); - } - - function complete(err) { - /* istanbul ignore if */ - if (err) { - return nextTick(function () { - callback(err); - }); - } - txn.batch([ - { - prefix: stores.metaStore, - type: 'put', - key: UPDATE_SEQ_KEY, - value: newUpdateSeq - }, - { - prefix: stores.metaStore, - type: 'put', - key: DOC_COUNT_KEY, - value: db._docCount + docCountDelta - } - ]); - txn.execute(db, function (err) { - /* istanbul ignore if */ - if (err) { - return callback(err); - } - db._docCount += docCountDelta; - db._updateSeq = newUpdateSeq; - levelChanges.notify(name); - nextTick(function () { - callback(null, results); - }); - }); - } - - if (!docInfos.length) { - return callback(null, []); - } - - verifyAttachments(function (err) { - if (err) { - return callback(err); - } - fetchExistingDocs(function (err) { - /* istanbul ignore if */ - if (err) { - return callback(err); - } - processDocs(revLimit, docInfos, api, fetchedDocs, txn, results, - writeDoc, opts, finish); - }); - }); - }); - api._allDocs = function (opts, callback) { - if ('keys' in opts) { - return allDocsKeysQuery(this, opts); - } - return readLock(function (opts, callback) { - opts = clone(opts); - countDocs(function (err, docCount) { - /* istanbul ignore if */ - if (err) { - return callback(err); - } - var readstreamOpts = {}; - var skip = opts.skip || 0; - if (opts.startkey) { - readstreamOpts.gte = opts.startkey; - } - if (opts.endkey) { - readstreamOpts.lte = opts.endkey; - } - if (opts.key) { - readstreamOpts.gte = readstreamOpts.lte = opts.key; - } - if (opts.descending) { - readstreamOpts.reverse = true; - // switch start and ends - 
var tmp = readstreamOpts.lte; - readstreamOpts.lte = readstreamOpts.gte; - readstreamOpts.gte = tmp; - } - var limit; - if (typeof opts.limit === 'number') { - limit = opts.limit; - } - if (limit === 0 || - ('gte' in readstreamOpts && 'lte' in readstreamOpts && - readstreamOpts.gte > readstreamOpts.lte)) { - // should return 0 results when start is greater than end. - // normally level would "fix" this for us by reversing the order, - // so short-circuit instead - var returnVal = { - total_rows: docCount, - offset: opts.skip, - rows: [] - }; - /* istanbul ignore if */ - if (opts.update_seq) { - returnVal.update_seq = db._updateSeq; - } - return callback(null, returnVal); - } - var results = []; - var docstream = stores.docStore.readStream(readstreamOpts); - - var throughStream = through(function (entry, _, next) { - var metadata = entry.value; - // winningRev and deleted are performance-killers, but - // in newer versions of PouchDB, they are cached on the metadata - var winningRev = getWinningRev(metadata); - var deleted = getIsDeleted(metadata, winningRev); - if (!deleted) { - if (skip-- > 0) { - next(); - return; - } else if (typeof limit === 'number' && limit-- <= 0) { - docstream.unpipe(); - docstream.destroy(); - next(); - return; - } - } else if (opts.deleted !== 'ok') { - next(); - return; - } - function allDocsInner(data) { - var doc = { - id: metadata.id, - key: metadata.id, - value: { - rev: winningRev - } - }; - if (opts.include_docs) { - doc.doc = data; - doc.doc._rev = doc.value.rev; - if (opts.conflicts) { - var conflicts = collectConflicts(metadata); - if (conflicts.length) { - doc.doc._conflicts = conflicts; - } - } - for (var att in doc.doc._attachments) { - if (Object.prototype.hasOwnProperty.call(doc.doc._attachments, att)) { - doc.doc._attachments[att].stub = true; - } - } - } - if (opts.inclusive_end === false && metadata.id === opts.endkey) { - return next(); - } else if (deleted) { - if (opts.deleted === 'ok') { - doc.value.deleted = true; - 
doc.doc = null; - } else { - /* istanbul ignore next */ - return next(); - } - } - results.push(doc); - next(); - } - if (opts.include_docs) { - var seq = metadata.rev_map[winningRev]; - stores.bySeqStore.get(formatSeq(seq), function (err, data) { - allDocsInner(data); - }); - } - else { - allDocsInner(); - } - }, function (next) { - Promise.resolve().then(function () { - if (opts.include_docs && opts.attachments) { - return fetchAttachments(results, stores, opts); - } - }).then(function () { - var returnVal = { - total_rows: docCount, - offset: opts.skip, - rows: results - }; - - /* istanbul ignore if */ - if (opts.update_seq) { - returnVal.update_seq = db._updateSeq; - } - callback(null, returnVal); - }, callback); - next(); - }).on('unpipe', function () { - throughStream.end(); - }); - - docstream.on('error', callback); - - docstream.pipe(throughStream); - }); - })(opts, callback); - }; - - api._changes = function (opts) { - opts = clone(opts); - - if (opts.continuous) { - var id = name + ':' + uuid(); - levelChanges.addListener(name, id, api, opts); - levelChanges.notify(name); - return { - cancel: function () { - levelChanges.removeListener(name, id); - } - }; - } - - var descending = opts.descending; - var results = []; - var lastSeq = opts.since || 0; - var called = 0; - var streamOpts = { - reverse: descending - }; - var limit; - if ('limit' in opts && opts.limit > 0) { - limit = opts.limit; - } - if (!streamOpts.reverse) { - streamOpts.start = formatSeq(opts.since || 0); - } - - var docIds = opts.doc_ids && new Set(opts.doc_ids); - var filter = filterChange(opts); - var docIdsToMetadata = new Map(); - - function complete() { - opts.done = true; - if (opts.return_docs && opts.limit) { - /* istanbul ignore if */ - if (opts.limit < results.length) { - results.length = opts.limit; - } - } - changeStream.unpipe(throughStream); - changeStream.destroy(); - if (!opts.continuous && !opts.cancelled) { - if (opts.include_docs && opts.attachments && opts.return_docs) 
{ - fetchAttachments(results, stores, opts).then(function () { - opts.complete(null, {results, last_seq: lastSeq}); - }); - } else { - opts.complete(null, {results, last_seq: lastSeq}); - } - } - } - var changeStream = stores.bySeqStore.readStream(streamOpts); - var throughStream = through(function (data, _, next) { - if (limit && called >= limit) { - complete(); - return next(); - } - if (opts.cancelled || opts.done) { - return next(); - } - - var seq = parseSeq(data.key); - var doc = data.value; - - if (seq === opts.since && !descending) { - // couchdb ignores `since` if descending=true - return next(); - } - - if (docIds && !docIds.has(doc._id)) { - return next(); - } - - var metadata; - - function onGetMetadata(metadata) { - var winningRev = getWinningRev(metadata); - - function onGetWinningDoc(winningDoc) { - - var change = opts.processChange(winningDoc, metadata, opts); - change.seq = metadata.seq; - - var filtered = filter(change); - if (typeof filtered === 'object') { - return opts.complete(filtered); - } - - if (filtered) { - called++; - - if (opts.attachments && opts.include_docs) { - // fetch attachment immediately for the benefit - // of live listeners - fetchAttachments([change], stores, opts).then(function () { - opts.onChange(change); - }); - } else { - opts.onChange(change); - } - - if (opts.return_docs) { - results.push(change); - } - } - next(); - } - - if (metadata.seq !== seq) { - // some other seq is later - return next(); - } - - lastSeq = seq; - - if (winningRev === doc._rev) { - return onGetWinningDoc(doc); - } - - // fetch the winner - - var winningSeq = metadata.rev_map[winningRev]; - - stores.bySeqStore.get(formatSeq(winningSeq), function (err, doc) { - onGetWinningDoc(doc); - }); - } - - metadata = docIdsToMetadata.get(doc._id); - if (metadata) { // cached - return onGetMetadata(metadata); - } - // metadata not cached, have to go fetch it - stores.docStore.get(doc._id, function (err, metadata) { - /* istanbul ignore if */ - if 
(opts.cancelled || opts.done || db.isClosed() || - isLocalId(metadata.id)) { - return next(); - } - docIdsToMetadata.set(doc._id, metadata); - onGetMetadata(metadata); - }); - }, function (next) { - if (opts.cancelled) { - return next(); - } - if (opts.return_docs && opts.limit) { - /* istanbul ignore if */ - if (opts.limit < results.length) { - results.length = opts.limit; - } - } - - next(); - }).on('unpipe', function () { - throughStream.end(); - complete(); - }); - changeStream.pipe(throughStream); - return { - cancel: function () { - opts.cancelled = true; - complete(); - } - }; - }; - - api._close = function (callback) { - /* istanbul ignore if */ - if (db.isClosed()) { - return callback(createError(NOT_OPEN)); - } - db.close(function (err) { - /* istanbul ignore if */ - if (err) { - callback(err); - } else { - dbStore.delete(name); - - var adapterName = functionName(leveldown); - var adapterStore = dbStores.get(adapterName); - var viewNamePrefix = PouchDB.prefix + name + "-mrview-"; - var keys = [...adapterStore.keys()].filter(k => k.includes(viewNamePrefix)); - keys.forEach(key => { - var eventEmitter = adapterStore.get(key); - eventEmitter.removeAllListeners(); - eventEmitter.close(); - adapterStore.delete(key); - }); - - callback(); - } - }); - }; - - api._getRevisionTree = function (docId, callback) { - stores.docStore.get(docId, function (err, metadata) { - if (err) { - callback(createError(MISSING_DOC)); - } else { - callback(null, metadata.rev_tree); - } - }); - }; - - api._doCompaction = writeLock(function (docId, revs, opts, callback) { - api._doCompactionNoLock(docId, revs, opts, callback); - }); - - // the NoLock version is for use by bulkDocs - api._doCompactionNoLock = function (docId, revs, opts, callback) { - if (typeof opts === 'function') { - callback = opts; - opts = {}; - } - - if (!revs.length) { - return callback(); - } - var txn = opts.ctx || new LevelTransaction(); - - txn.get(stores.docStore, docId, function (err, metadata) { - /* 
istanbul ignore if */ - if (err) { - return callback(err); - } - var seqs = revs.map(function (rev) { - var seq = metadata.rev_map[rev]; - delete metadata.rev_map[rev]; - return seq; - }); - traverseRevTree(metadata.rev_tree, function (isLeaf, pos, - revHash, ctx, opts) { - var rev = pos + '-' + revHash; - if (revs.indexOf(rev) !== -1) { - opts.status = 'missing'; - } - }); - - var batch = []; - batch.push({ - key: metadata.id, - value: metadata, - type: 'put', - prefix: stores.docStore - }); - - var digestMap = {}; - var numDone = 0; - var overallErr; - function checkDone(err) { - /* istanbul ignore if */ - if (err) { - overallErr = err; - } - if (++numDone === revs.length) { // done - /* istanbul ignore if */ - if (overallErr) { - return callback(overallErr); - } - deleteOrphanedAttachments(); - } - } - - function finish(err) { - /* istanbul ignore if */ - if (err) { - return callback(err); - } - txn.batch(batch); - if (opts.ctx) { - // don't execute immediately - return callback(); - } - txn.execute(db, callback); - } - - function deleteOrphanedAttachments() { - var possiblyOrphanedAttachments = Object.keys(digestMap); - if (!possiblyOrphanedAttachments.length) { - return finish(); - } - var numDone = 0; - var overallErr; - function checkDone(err) { - /* istanbul ignore if */ - if (err) { - overallErr = err; - } - if (++numDone === possiblyOrphanedAttachments.length) { - finish(overallErr); - } - } - var refsToDelete = new Map(); - revs.forEach(function (rev) { - refsToDelete.set(docId + '@' + rev, true); - }); - possiblyOrphanedAttachments.forEach(function (digest) { - txn.get(stores.attachmentStore, digest, function (err, attData) { - /* istanbul ignore if */ - if (err) { - if (err.name === 'NotFoundError') { - return checkDone(); - } else { - return checkDone(err); - } - } - var refs = Object.keys(attData.refs || {}).filter(function (ref) { - return !refsToDelete.has(ref); - }); - var newRefs = {}; - refs.forEach(function (ref) { - newRefs[ref] = true; - }); 
- if (refs.length) { // not orphaned - batch.push({ - key: digest, - type: 'put', - value: {refs: newRefs}, - prefix: stores.attachmentStore - }); - } else { // orphaned, can safely delete - batch = batch.concat([{ - key: digest, - type: 'del', - prefix: stores.attachmentStore - }, { - key: digest, - type: 'del', - prefix: stores.binaryStore - }]); - } - checkDone(); - }); - }); - } - - seqs.forEach(function (seq) { - batch.push({ - key: formatSeq(seq), - type: 'del', - prefix: stores.bySeqStore - }); - txn.get(stores.bySeqStore, formatSeq(seq), function (err, doc) { - /* istanbul ignore if */ - if (err) { - if (err.name === 'NotFoundError') { - return checkDone(); - } else { - return checkDone(err); - } - } - var atts = Object.keys(doc._attachments || {}); - atts.forEach(function (attName) { - var digest = doc._attachments[attName].digest; - digestMap[digest] = true; - }); - checkDone(); - }); - }); - }); - }; - - api._getLocal = function (id, callback) { - stores.localStore.get(id, function (err, doc) { - if (err) { - callback(createError(MISSING_DOC)); - } else { - callback(null, doc); - } - }); - }; - - api._putLocal = function (doc, opts, callback) { - if (typeof opts === 'function') { - callback = opts; - opts = {}; - } - if (opts.ctx) { - api._putLocalNoLock(doc, opts, callback); - } else { - api._putLocalWithLock(doc, opts, callback); - } - }; - - api._putLocalWithLock = writeLock(function (doc, opts, callback) { - api._putLocalNoLock(doc, opts, callback); - }); - - // the NoLock version is for use by bulkDocs - api._putLocalNoLock = function (doc, opts, callback) { - delete doc._revisions; // ignore this, trust the rev - var oldRev = doc._rev; - var id = doc._id; - - var txn = opts.ctx || new LevelTransaction(); - - txn.get(stores.localStore, id, function (err, resp) { - if (err && oldRev) { - return callback(createError(REV_CONFLICT)); - } - if (resp && resp._rev !== oldRev) { - return callback(createError(REV_CONFLICT)); - } - doc._rev = - oldRev ? 
'0-' + (parseInt(oldRev.split('-')[1], 10) + 1) : '0-1'; - var batch = [ - { - type: 'put', - prefix: stores.localStore, - key: id, - value: doc - } - ]; - - txn.batch(batch); - var ret = {ok: true, id: doc._id, rev: doc._rev}; - - if (opts.ctx) { - // don't execute immediately - return callback(null, ret); - } - txn.execute(db, function (err) { - /* istanbul ignore if */ - if (err) { - return callback(err); - } - callback(null, ret); - }); - }); - }; - - api._removeLocal = function (doc, opts, callback) { - if (typeof opts === 'function') { - callback = opts; - opts = {}; - } - if (opts.ctx) { - api._removeLocalNoLock(doc, opts, callback); - } else { - api._removeLocalWithLock(doc, opts, callback); - } - }; - - api._removeLocalWithLock = writeLock(function (doc, opts, callback) { - api._removeLocalNoLock(doc, opts, callback); - }); - - // the NoLock version is for use by bulkDocs - api._removeLocalNoLock = function (doc, opts, callback) { - var txn = opts.ctx || new LevelTransaction(); - txn.get(stores.localStore, doc._id, function (err, resp) { - if (err) { - /* istanbul ignore if */ - if (err.name !== 'NotFoundError') { - return callback(err); - } else { - return callback(createError(MISSING_DOC)); - } - } - if (resp._rev !== doc._rev) { - return callback(createError(REV_CONFLICT)); - } - txn.batch([{ - prefix: stores.localStore, - type: 'del', - key: doc._id - }]); - var ret = {ok: true, id: doc._id, rev: '0-0'}; - if (opts.ctx) { - // don't execute immediately - return callback(null, ret); - } - txn.execute(db, function (err) { - /* istanbul ignore if */ - if (err) { - return callback(err); - } - callback(null, ret); - }); - }); - }; - - // close and delete open leveldb stores - api._destroy = function (opts, callback) { - var dbStore; - var leveldownName = functionName(leveldown); - /* istanbul ignore else */ - if (dbStores.has(leveldownName)) { - dbStore = dbStores.get(leveldownName); - } else { - return callDestroy(name, callback); - } - - /* istanbul 
ignore else */ - if (dbStore.has(name)) { - levelChanges.removeAllListeners(name); - - dbStore.get(name).close(function () { - dbStore.delete(name); - callDestroy(name, callback); - }); - } else { - callDestroy(name, callback); - } - }; - function callDestroy(name, cb) { - // May not exist if leveldown is backed by memory adapter - /* istanbul ignore else */ - if ('destroy' in leveldown) { - leveldown.destroy(name, cb); - } else { - cb(null); - } - } -} - -export default LevelPouch; diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/prepareAttachmentForStorage-browser.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/prepareAttachmentForStorage-browser.js deleted file mode 100644 index 8c4bcc9cde..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/prepareAttachmentForStorage-browser.js +++ /dev/null @@ -1,8 +0,0 @@ -import { readAsBinaryString } from 'pouchdb-binary-utils'; - -// In the browser, we store a binary string -function prepareAttachmentForStorage(attData, cb) { - readAsBinaryString(attData, cb); -} - -export default prepareAttachmentForStorage; diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/prepareAttachmentForStorage.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/prepareAttachmentForStorage.js deleted file mode 100644 index b25ffce06c..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/prepareAttachmentForStorage.js +++ /dev/null @@ -1,6 +0,0 @@ -// in Node, we store the buffer directly -function prepareAttachmentForStorage(attData, cb) { - cb(attData); -} - -export default prepareAttachmentForStorage; diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/readAsBlobOrBuffer-browser.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/readAsBlobOrBuffer-browser.js deleted file mode 100644 index ec50d621c9..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/readAsBlobOrBuffer-browser.js +++ /dev/null @@ -1,11 
+0,0 @@ -import { blob as createBlob } from 'pouchdb-binary-utils'; - -function readAsBlobOrBuffer(storedObject, type) { - // In the browser, we've stored a binary string. This now comes back as a - // browserified Node-style Buffer (implemented as a typed array), - // but we want a Blob instead. - var byteArray = new Uint8Array(storedObject); - return createBlob([byteArray], {type}); -} - -export default readAsBlobOrBuffer; diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/readAsBlobOrBuffer.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/readAsBlobOrBuffer.js deleted file mode 100644 index b5504452e2..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/readAsBlobOrBuffer.js +++ /dev/null @@ -1,7 +0,0 @@ -function readAsBlobOrBuffer(storedObject, type) { - // In Node, we've stored a buffer - storedObject.type = type; // non-standard, but used for consistency - return storedObject; -} - -export default readAsBlobOrBuffer; diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/src/transaction.js b/packages/node_modules/pouchdb-adapter-leveldb-core/src/transaction.js deleted file mode 100644 index 564301ee45..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb-core/src/transaction.js +++ /dev/null @@ -1,85 +0,0 @@ -// similar to an idb or websql transaction object -// designed to be passed around. 
basically just caches -// things in-memory and then does a big batch() operation -// when you're done - -import { nextTick } from 'pouchdb-utils'; - -function getCacheFor(transaction, store) { - var prefix = store.prefix()[0]; - var cache = transaction._cache; - var subCache = cache.get(prefix); - if (!subCache) { - subCache = new Map(); - cache.set(prefix, subCache); - } - return subCache; -} - -class LevelTransaction { - constructor() { - this._batch = []; - this._cache = new Map(); - } - - get(store, key, callback) { - var cache = getCacheFor(this, store); - var exists = cache.get(key); - if (exists) { - return nextTick(function () { - callback(null, exists); - }); - } else if (exists === null) { // deleted marker - /* istanbul ignore next */ - return nextTick(function () { - callback({name: 'NotFoundError'}); - }); - } - store.get(key, function (err, res) { - if (err) { - /* istanbul ignore else */ - if (err.name === 'NotFoundError') { - cache.set(key, null); - } - return callback(err); - } - cache.set(key, res); - callback(null, res); - }); - } - - batch(batch) { - for (var i = 0, len = batch.length; i < len; i++) { - var operation = batch[i]; - - var cache = getCacheFor(this, operation.prefix); - - if (operation.type === 'put') { - cache.set(operation.key, operation.value); - } else { - cache.set(operation.key, null); - } - } - this._batch = this._batch.concat(batch); - } - - execute(db, callback) { - var keys = new Set(); - var uniqBatches = []; - - // remove duplicates; last one wins - for (var i = this._batch.length - 1; i >= 0; i--) { - var operation = this._batch[i]; - var lookupKey = operation.prefix.prefix()[0] + '\xff' + operation.key; - if (keys.has(lookupKey)) { - continue; - } - keys.add(lookupKey); - uniqBatches.push(operation); - } - - db.batch(uniqBatches, callback); - } -} - -export default LevelTransaction; diff --git a/packages/node_modules/pouchdb-adapter-leveldb/README.md b/packages/node_modules/pouchdb-adapter-leveldb/README.md deleted 
file mode 100644 index d70a683de7..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb/README.md +++ /dev/null @@ -1,25 +0,0 @@ -pouchdb-adapter-leveldb -====== - -PouchDB adapter using LevelDB as its backing store. Designed to run in Node. Its adapter name is `'leveldb'`. - -### Usage - -```bash -npm install pouchdb -``` - -```js -PouchDB.plugin(require('pouchdb-adapter-leveldb')); -var db = new PouchDB('my_db', {adapter: 'leveldb'}); -``` - -For full API documentation and guides on PouchDB, see [PouchDB.com](http://pouchdb.com/). For details on PouchDB sub-packages, see the [Custom Builds documentation](http://pouchdb.com/custom.html). - -### Source - -PouchDB and its sub-packages are distributed as a [monorepo](https://github.com/babel/babel/blob/master/doc/design/monorepo.md). - -For a full list of packages, see [the GitHub source](https://github.com/pouchdb/pouchdb/tree/master/packages). - - diff --git a/packages/node_modules/pouchdb-adapter-leveldb/package-lock.json b/packages/node_modules/pouchdb-adapter-leveldb/package-lock.json deleted file mode 100644 index 1fff7cc950..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb/package-lock.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "pouchdb-adapter-leveldb", - "version": "7.0.0-prerelease", - "lockfileVersion": 1 -} diff --git a/packages/node_modules/pouchdb-adapter-leveldb/package.json b/packages/node_modules/pouchdb-adapter-leveldb/package.json deleted file mode 100644 index 48629e4343..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "pouchdb-adapter-leveldb", - "version": "7.0.0-prerelease", - "description": "PouchDB adapter using LevelDB as its backing store.", - "main": "./lib/index.js", - "keywords": [], - "browser": { - "leveldown": false - }, - "author": "Dale Harvey ", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "https://github.com/pouchdb/pouchdb.git", - "directory": 
"packages/node_modules/pouchdb-adapter-leveldb" - }, - "module": "./src/index.js" -} diff --git a/packages/node_modules/pouchdb-adapter-leveldb/src/index.js b/packages/node_modules/pouchdb-adapter-leveldb/src/index.js deleted file mode 100644 index e47209ffbb..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb/src/index.js +++ /dev/null @@ -1,38 +0,0 @@ -import CoreLevelPouch from 'pouchdb-adapter-leveldb-core'; - -import requireLeveldown from './requireLeveldown'; -import migrate from './migrate'; - -function LevelDownPouch(opts, callback) { - - // Users can pass in their own leveldown alternative here, in which case - // it overrides the default one. (This is in addition to the custom builds.) - var leveldown = opts.db; - - /* istanbul ignore else */ - if (!leveldown) { - leveldown = requireLeveldown(); - - /* istanbul ignore if */ - if (leveldown instanceof Error) { - return callback(leveldown); - } - } - - var _opts = Object.assign({ - db: leveldown, - migrate - }, opts); - - CoreLevelPouch.call(this, _opts, callback); -} - -// overrides for normal LevelDB behavior on Node -LevelDownPouch.valid = function () { - return true; -}; -LevelDownPouch.use_prefix = false; - -export default function (PouchDB) { - PouchDB.adapter('leveldb', LevelDownPouch, true); -} diff --git a/packages/node_modules/pouchdb-adapter-leveldb/src/migrate.js b/packages/node_modules/pouchdb-adapter-leveldb/src/migrate.js deleted file mode 100644 index 4f5566c187..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb/src/migrate.js +++ /dev/null @@ -1,194 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { isLocalId, winningRev } from 'pouchdb-merge'; -import level from 'level'; -import { obj as through } from 'through2'; -import LevelWriteStream from 'level-write-stream'; - -var stores = [ - 'document-store', - 'by-sequence', - 'attach-store', - 'attach-binary-store' -]; -function formatSeq(n) { - return ('0000000000000000' + n).slice(-16); -} -var 
UPDATE_SEQ_KEY = '_local_last_update_seq'; -var DOC_COUNT_KEY = '_local_doc_count'; -var UUID_KEY = '_local_uuid'; - -var doMigrationOne = function (name, db, callback) { - // local require to prevent crashing if leveldown isn't installed. - var leveldown = require("leveldown"); - - var base = path.resolve(name); - function move(store, index, cb) { - var storePath = path.join(base, store); - var opts; - if (index === 3) { - opts = { - valueEncoding: 'binary' - }; - } else { - opts = { - valueEncoding: 'json' - }; - } - var sub = db.sublevel(store, opts); - var orig = level(storePath, opts); - var from = orig.createReadStream(); - var writeStream = new LevelWriteStream(sub); - var to = writeStream(); - from.on('end', function () { - orig.close(function (err) { - cb(err, storePath); - }); - }); - from.pipe(to); - } - fs.unlink(base + '.uuid', function (err) { - if (err) { - return callback(); - } - var todo = 4; - var done = []; - stores.forEach(function (store, i) { - move(store, i, function (err, storePath) { - /* istanbul ignore if */ - if (err) { - return callback(err); - } - done.push(storePath); - if (!(--todo)) { - done.forEach(function (item) { - leveldown.destroy(item, function () { - if (++todo === done.length) { - fs.rmdir(base, callback); - } - }); - }); - } - }); - }); - }); -}; -var doMigrationTwo = function (db, stores, callback) { - var batches = []; - stores.bySeqStore.get(UUID_KEY, function (err, value) { - if (err) { - // no uuid key, so don't need to migrate; - return callback(); - } - batches.push({ - key: UUID_KEY, - value, - prefix: stores.metaStore, - type: 'put', - valueEncoding: 'json' - }); - batches.push({ - key: UUID_KEY, - prefix: stores.bySeqStore, - type: 'del' - }); - stores.bySeqStore.get(DOC_COUNT_KEY, function (err, value) { - if (value) { - // if no doc count key, - // just skip - // we can live with this - batches.push({ - key: DOC_COUNT_KEY, - value, - prefix: stores.metaStore, - type: 'put', - valueEncoding: 'json' - }); - 
batches.push({ - key: DOC_COUNT_KEY, - prefix: stores.bySeqStore, - type: 'del' - }); - } - stores.bySeqStore.get(UPDATE_SEQ_KEY, function (err, value) { - if (value) { - // if no UPDATE_SEQ_KEY - // just skip - // we've gone to far to stop. - batches.push({ - key: UPDATE_SEQ_KEY, - value, - prefix: stores.metaStore, - type: 'put', - valueEncoding: 'json' - }); - batches.push({ - key: UPDATE_SEQ_KEY, - prefix: stores.bySeqStore, - type: 'del' - }); - } - var deletedSeqs = {}; - stores.docStore.createReadStream({ - startKey: '_', - endKey: '_\xFF' - }).pipe(through(function (ch, _, next) { - if (!isLocalId(ch.key)) { - return next(); - } - batches.push({ - key: ch.key, - prefix: stores.docStore, - type: 'del' - }); - var winner = winningRev(ch.value); - Object.keys(ch.value.rev_map).forEach(function (key) { - if (key !== 'winner') { - this.push(formatSeq(ch.value.rev_map[key])); - } - }, this); - var winningSeq = ch.value.rev_map[winner]; - stores.bySeqStore.get(formatSeq(winningSeq), function (err, value) { - if (!err) { - batches.push({ - key: ch.key, - value, - prefix: stores.localStore, - type: 'put', - valueEncoding: 'json' - }); - } - next(); - }); - - })).pipe(through(function (seq, _, next) { - /* istanbul ignore if */ - if (deletedSeqs[seq]) { - return next(); - } - deletedSeqs[seq] = true; - stores.bySeqStore.get(seq, function (err, resp) { - /* istanbul ignore if */ - if (err || !isLocalId(resp._id)) { - return next(); - } - batches.push({ - key: seq, - prefix: stores.bySeqStore, - type: 'del' - }); - next(); - }); - }, function () { - db.batch(batches, callback); - })); - }); - }); - }); - -}; - -export default { - doMigrationOne, - doMigrationTwo -}; diff --git a/packages/node_modules/pouchdb-adapter-leveldb/src/requireLeveldown.js b/packages/node_modules/pouchdb-adapter-leveldb/src/requireLeveldown.js deleted file mode 100644 index 0580eb447d..0000000000 --- a/packages/node_modules/pouchdb-adapter-leveldb/src/requireLeveldown.js +++ /dev/null @@ -1,30 
+0,0 @@ -// require leveldown. provide verbose output on error as it is the default -// nodejs adapter, which we do not provide for the user -/* istanbul ignore next */ -var requireLeveldown = function () { - try { - return require('leveldown'); - } catch (err) { - /* eslint no-ex-assign: 0*/ - err = err || 'leveldown import error'; - if (err.code === 'MODULE_NOT_FOUND') { - // handle leveldown not installed case - return new Error([ - 'the \'leveldown\' package is not available. install it, or,', - 'specify another storage backend using the \'db\' option' - ].join(' ')); - } else if (err.message && err.message.match('Module version mismatch')) { - // handle common user environment error - return new Error([ - err.message, - 'This generally implies that leveldown was built with a different', - 'version of node than that which is running now. You may try', - 'fully removing and reinstalling PouchDB or leveldown to resolve.' - ].join(' ')); - } - // handle general internal nodejs require error - return new Error(err.toString() + ': unable to import leveldown'); - } -}; - -export default requireLeveldown; diff --git a/packages/node_modules/pouchdb-adapter-localstorage/LICENSE b/packages/node_modules/pouchdb-adapter-localstorage/LICENSE deleted file mode 100644 index f6cd2bc808..0000000000 --- a/packages/node_modules/pouchdb-adapter-localstorage/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/node_modules/pouchdb-adapter-localstorage/README.md b/packages/node_modules/pouchdb-adapter-localstorage/README.md deleted file mode 100644 index e902e1291f..0000000000 --- a/packages/node_modules/pouchdb-adapter-localstorage/README.md +++ /dev/null @@ -1,25 +0,0 @@ -pouchdb-adapter-localstorage -====== - -PouchDB adapter using LocalStorage as its data store. Designed to run in the browser. Its adapter name is `'localstorage'`. - -### Usage - -```bash -npm install pouchdb-adapter-localstorage -``` - -```js -PouchDB.plugin(require('pouchdb-adapter-localstorage')); -var db = new PouchDB('mydb', {adapter: 'localstorage'}); -``` - -For full API documentation and guides on PouchDB, see [PouchDB.com](http://pouchdb.com/). 
For details on PouchDB sub-packages, see the [Custom Builds documentation](http://pouchdb.com/custom.html). - -### Source - -PouchDB and its sub-packages are distributed as a [monorepo](https://github.com/babel/babel/blob/master/doc/design/monorepo.md). - -For a full list of packages, see [the GitHub source](https://github.com/pouchdb/pouchdb/tree/master/packages). - - diff --git a/packages/node_modules/pouchdb-adapter-localstorage/package-lock.json b/packages/node_modules/pouchdb-adapter-localstorage/package-lock.json deleted file mode 100644 index dcc50db0d1..0000000000 --- a/packages/node_modules/pouchdb-adapter-localstorage/package-lock.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "pouchdb-adapter-localstorage", - "version": "7.0.0-prerelease", - "lockfileVersion": 1 -} diff --git a/packages/node_modules/pouchdb-adapter-localstorage/package.json b/packages/node_modules/pouchdb-adapter-localstorage/package.json deleted file mode 100644 index 32648ae30c..0000000000 --- a/packages/node_modules/pouchdb-adapter-localstorage/package.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "pouchdb-adapter-localstorage", - "version": "7.0.0-prerelease", - "description": "PouchDB adapter using LocalStorage as its data store.", - "main": "./lib/index.js", - "keywords": [], - "author": "Dale Harvey ", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "https://github.com/pouchdb/pouchdb.git", - "directory": "packages/node_modules/pouchdb-adapter-localstorage" - }, - "module": "./src/index.js" -} diff --git a/packages/node_modules/pouchdb-adapter-localstorage/src/index.js b/packages/node_modules/pouchdb-adapter-localstorage/src/index.js deleted file mode 100644 index ab68e2f934..0000000000 --- a/packages/node_modules/pouchdb-adapter-localstorage/src/index.js +++ /dev/null @@ -1,22 +0,0 @@ -import CoreLevelPouch from 'pouchdb-adapter-leveldb-core'; - - -import localstoragedown from 'localstorage-down'; - -function LocalStoragePouch(opts, callback) { - var 
_opts = Object.assign({ - db: localstoragedown - }, opts); - - CoreLevelPouch.call(this, _opts, callback); -} - -// overrides for normal LevelDB behavior on Node -LocalStoragePouch.valid = function () { - return typeof localStorage !== 'undefined'; -}; -LocalStoragePouch.use_prefix = true; - -export default function (PouchDB) { - PouchDB.adapter('localstorage', LocalStoragePouch, true); -} diff --git a/packages/node_modules/pouchdb-adapter-memory/LICENSE b/packages/node_modules/pouchdb-adapter-memory/LICENSE deleted file mode 100644 index f6cd2bc808..0000000000 --- a/packages/node_modules/pouchdb-adapter-memory/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/node_modules/pouchdb-adapter-memory/README.md b/packages/node_modules/pouchdb-adapter-memory/README.md deleted file mode 100644 index 4d30ab49ad..0000000000 --- a/packages/node_modules/pouchdb-adapter-memory/README.md +++ /dev/null @@ -1,25 +0,0 @@ -pouchdb-adapter-memory -====== - -PouchDB adapter using in-memory as its data store. Designed to run in either Node or the browser. Its adapter name is `'memory'`. - -### Usage - -```bash -npm install pouchdb-adapter-memory -``` - -```js -PouchDB.plugin(require('pouchdb-adapter-memory')); -var db = new PouchDB('mydb', {adapter: 'memory'}); -``` - -For full API documentation and guides on PouchDB, see [PouchDB.com](http://pouchdb.com/). For details on PouchDB sub-packages, see the [Custom Builds documentation](http://pouchdb.com/custom.html). - -### Source - -PouchDB and its sub-packages are distributed as a [monorepo](https://github.com/babel/babel/blob/master/doc/design/monorepo.md). - -For a full list of packages, see [the GitHub source](https://github.com/pouchdb/pouchdb/tree/master/packages). 
- - diff --git a/packages/node_modules/pouchdb-adapter-memory/package-lock.json b/packages/node_modules/pouchdb-adapter-memory/package-lock.json deleted file mode 100644 index 34033376b6..0000000000 --- a/packages/node_modules/pouchdb-adapter-memory/package-lock.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "pouchdb-adapter-memory", - "version": "7.0.0-prerelease", - "lockfileVersion": 1 -} diff --git a/packages/node_modules/pouchdb-adapter-memory/src/index.js b/packages/node_modules/pouchdb-adapter-memory/src/index.js deleted file mode 100644 index ea6727cc66..0000000000 --- a/packages/node_modules/pouchdb-adapter-memory/src/index.js +++ /dev/null @@ -1,22 +0,0 @@ -import CoreLevelPouch from 'pouchdb-adapter-leveldb-core'; - - -import memdown from 'memdown'; - -function MemDownPouch(opts, callback) { - var _opts = Object.assign({ - db: memdown - }, opts); - - CoreLevelPouch.call(this, _opts, callback); -} - -// overrides for normal LevelDB behavior on Node -MemDownPouch.valid = function () { - return true; -}; -MemDownPouch.use_prefix = false; - -export default function (PouchDB) { - PouchDB.adapter('memory', MemDownPouch, true); -} diff --git a/packages/node_modules/pouchdb-adapter-leveldb-core/LICENSE b/packages/node_modules/pouchdb-adapter-nodesqlite/LICENSE similarity index 100% rename from packages/node_modules/pouchdb-adapter-leveldb-core/LICENSE rename to packages/node_modules/pouchdb-adapter-nodesqlite/LICENSE diff --git a/packages/node_modules/pouchdb-adapter-memory/package.json b/packages/node_modules/pouchdb-adapter-nodesqlite/package.json similarity index 60% rename from packages/node_modules/pouchdb-adapter-memory/package.json rename to packages/node_modules/pouchdb-adapter-nodesqlite/package.json index 69201f88bb..ff2d919245 100644 --- a/packages/node_modules/pouchdb-adapter-memory/package.json +++ b/packages/node_modules/pouchdb-adapter-nodesqlite/package.json @@ -1,7 +1,7 @@ { - "name": "pouchdb-adapter-memory", + "name": 
"pouchdb-adapter-nodesqlite", "version": "7.0.0-prerelease", - "description": "PouchDB adapter using in-memory as its data store.", + "description": "PouchDB adapter using node sqlite as its data store.", "main": "./lib/index.js", "keywords": [], "author": "Dale Harvey ", @@ -9,7 +9,7 @@ "repository": { "type": "git", "url": "https://github.com/pouchdb/pouchdb.git", - "directory": "packages/node_modules/pouchdb-adapter-memory" + "directory": "packages/node_modules/pouchdb-adapter-nodesqlite" }, "module": "./src/index.js" } diff --git a/packages/node_modules/pouchdb-adapter-nodesqlite/src/index.js b/packages/node_modules/pouchdb-adapter-nodesqlite/src/index.js new file mode 100644 index 0000000000..7e118d0bd9 --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-nodesqlite/src/index.js @@ -0,0 +1,20 @@ +import CoreWebsqlPouch from 'pouchdb-adapter-websql-core'; + +import openDB from '@neighbourhoodie/websql'; + +function NodeSqlitePouch(opts, callback) { + var _opts = Object.assign({ + websql: openDB + }, opts); + + CoreWebsqlPouch.call(this, _opts, callback); +} + +NodeSqlitePouch.valid = function () { + return true; +}; +NodeSqlitePouch.use_prefix = false; + +export default function (PouchDB) { + PouchDB.adapter('nodesqlite', NodeSqlitePouch, true); +} diff --git a/packages/node_modules/pouchdb-adapter-leveldb/LICENSE b/packages/node_modules/pouchdb-adapter-websql-core/LICENSE similarity index 100% rename from packages/node_modules/pouchdb-adapter-leveldb/LICENSE rename to packages/node_modules/pouchdb-adapter-websql-core/LICENSE diff --git a/packages/node_modules/sublevel-pouchdb/README.md b/packages/node_modules/pouchdb-adapter-websql-core/README.md similarity index 64% rename from packages/node_modules/sublevel-pouchdb/README.md rename to packages/node_modules/pouchdb-adapter-websql-core/README.md index b59fbe8276..8320e48bf1 100644 --- a/packages/node_modules/sublevel-pouchdb/README.md +++ b/packages/node_modules/pouchdb-adapter-websql-core/README.md @@ 
-1,28 +1,21 @@ -sublevel-pouchdb ![semver non-compliant](https://img.shields.io/badge/semver-non--compliant-red.svg) -====== +# pouchdb-adapter-websql-core ![semver non-compliant](https://img.shields.io/badge/semver-non--compliant-red.svg) -Fork of [level-sublevel](https://github.com/dominictarr/level-sublevel) -with ony the subset of the API that PouchDB uses. Forked from v6.5.4, -commit [fa1b712](https://github.com/dominictarr/level-sublevel/commits/fa1b7121f9632b637e650cc1ec9b1723b60df864). +Underlying adapter code for WebSQL and SQLite-based PouchDB adapters. ### Usage ```bash -npm install --save-exact sublevel-pouchdb +npm install --save-exact @craftzdog/pouchdb-adapter-websql-core ``` For full API documentation and guides on PouchDB, see [PouchDB.com](http://pouchdb.com/). For details on PouchDB sub-packages, see the [Custom Builds documentation](http://pouchdb.com/custom.html). - ### Warning: semver-free zone! This package is conceptually an internal API used by PouchDB or its plugins. It does not follow semantic versioning (semver), and rather its version is pegged to PouchDB's. Use exact versions when installing, e.g. with `--save-exact`. - ### Source PouchDB and its sub-packages are distributed as a [monorepo](https://github.com/babel/babel/blob/master/doc/design/monorepo.md). For a full list of packages, see [the GitHub source](https://github.com/pouchdb/pouchdb/tree/master/packages). 
- - diff --git a/packages/node_modules/pouchdb-adapter-websql-core/package-lock.json b/packages/node_modules/pouchdb-adapter-websql-core/package-lock.json new file mode 100644 index 0000000000..8eafa8b8cf --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-websql-core/package-lock.json @@ -0,0 +1,119 @@ +{ + "name": "pouchdb-adapter-websql-core", + "version": "7.2.3", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "argsarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/argsarray/-/argsarray-0.0.1.tgz", + "integrity": "sha1-bnIHtOzbObCviDA/pa4ivajfYcs=" + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" + }, + "clone-buffer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz", + "integrity": "sha1-4+JbIHrE5wGvch4staFnksrD3Fg=" + }, + "immediate": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.3.0.tgz", + "integrity": "sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q==" + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "pouchdb-adapter-utils": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-adapter-utils/-/pouchdb-adapter-utils-7.2.2.tgz", + "integrity": "sha512-2CzZkTyTyHZkr3ePiWFMTiD5+56lnembMjaTl8ohwegM0+hYhRyJux0biAZafVxgIL4gnCUC4w2xf6WVztzKdg==", + "requires": { + "pouchdb-binary-utils": "7.2.2", + "pouchdb-collections": "7.2.2", + "pouchdb-errors": "7.2.2", + "pouchdb-md5": "7.2.2", + "pouchdb-merge": "7.2.2", + "pouchdb-utils": "7.2.2" + } + }, + "pouchdb-binary-utils": { + 
"version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-binary-utils/-/pouchdb-binary-utils-7.2.2.tgz", + "integrity": "sha512-shacxlmyHbUrNfE6FGYpfyAJx7Q0m91lDdEAaPoKZM3SzAmbtB1i+OaDNtYFztXjJl16yeudkDb3xOeokVL3Qw==", + "requires": { + "buffer-from": "1.1.1" + } + }, + "pouchdb-collections": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-collections/-/pouchdb-collections-7.2.2.tgz", + "integrity": "sha512-6O9zyAYlp3UdtfneiMYuOCWdUCQNo2bgdjvNsMSacQX+3g8WvIoFQCYJjZZCpTttQGb+MHeRMr8m2U95lhJTew==" + }, + "pouchdb-errors": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-errors/-/pouchdb-errors-7.2.2.tgz", + "integrity": "sha512-6GQsiWc+7uPfgEHeavG+7wuzH3JZW29Dnrvz8eVbDFE50kVFxNDVm3EkYHskvo5isG7/IkOx7PV7RPTA3keG3g==", + "requires": { + "inherits": "2.0.4" + } + }, + "pouchdb-json": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-json/-/pouchdb-json-7.2.2.tgz", + "integrity": "sha512-3b2S2ynN+aoB7aCNyDZc/4c0IAdx/ir3nsHB+/RrKE9cM3QkQYbnnE3r/RvOD1Xvr6ji/KOCBie+Pz/6sxoaug==", + "requires": { + "vuvuzela": "1.0.3" + } + }, + "pouchdb-md5": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-md5/-/pouchdb-md5-7.2.2.tgz", + "integrity": "sha512-c/RvLp2oSh8PLAWU5vFBnp6ejJABIdKqboZwRRUrWcfGDf+oyX8RgmJFlYlzMMOh4XQLUT1IoaDV8cwlsuryZw==", + "requires": { + "pouchdb-binary-utils": "7.2.2", + "spark-md5": "3.0.1" + } + }, + "pouchdb-merge": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-merge/-/pouchdb-merge-7.2.2.tgz", + "integrity": "sha512-6yzKJfjIchBaS7Tusuk8280WJdESzFfQ0sb4jeMUNnrqs4Cx3b0DIEOYTRRD9EJDM+je7D3AZZ4AT0tFw8gb4A==" + }, + "pouchdb-utils": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/pouchdb-utils/-/pouchdb-utils-7.2.2.tgz", + "integrity": "sha512-XmeM5ioB4KCfyB2MGZXu1Bb2xkElNwF1qG+zVFbQsKQij0zvepdOUfGuWvLRHxTOmt4muIuSOmWZObZa3NOgzQ==", + "requires": { + "argsarray": "0.0.1", + "clone-buffer": "1.0.0", + 
"immediate": "3.3.0", + "inherits": "2.0.4", + "pouchdb-collections": "7.2.2", + "pouchdb-errors": "7.2.2", + "pouchdb-md5": "7.2.2", + "uuid": "8.1.0" + } + }, + "spark-md5": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.1.tgz", + "integrity": "sha512-0tF3AGSD1ppQeuffsLDIOWlKUd3lS92tFxcsrh5Pe3ZphhnoK+oXIBTzOAThZCiuINZLvpiLH/1VS1/ANEJVig==" + }, + "uuid": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.1.0.tgz", + "integrity": "sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg==" + }, + "vuvuzela": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/vuvuzela/-/vuvuzela-1.0.3.tgz", + "integrity": "sha1-O+FF5YJxxzylUnndhR8SpoIRSws=" + } + } +} diff --git a/packages/node_modules/pouchdb-adapter-websql-core/package.json b/packages/node_modules/pouchdb-adapter-websql-core/package.json new file mode 100644 index 0000000000..f5319fe489 --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-websql-core/package.json @@ -0,0 +1,25 @@ +{ + "name": "pouchdb-adapter-websql-core", + "version": "7.2.3", + "description": "Underlying adapter code for WebSQL and SQLite-based PouchDB adapters.", + "main": "./lib/index.js", + "keywords": [], + "author": "Dale Harvey ", + "license": "Apache-2.0", + "repository": "https://github.com/pouchdb/pouchdb", + "dependencies": { + "pouchdb-adapter-utils": "^7.2.2", + "pouchdb-binary-utils": "^7.2.2", + "pouchdb-collections": "^7.2.2", + "pouchdb-errors": "^7.2.2", + "pouchdb-json": "^7.2.2", + "pouchdb-merge": "^7.2.2", + "pouchdb-utils": "^7.2.2" + }, + "module": "./lib/index.es.js", + "files": [ + "src", + "dist", + "tonic-example.js" + ] +} diff --git a/packages/node_modules/pouchdb-adapter-websql-core/src/bulkDocs.js b/packages/node_modules/pouchdb-adapter-websql-core/src/bulkDocs.js new file mode 100644 index 0000000000..301b6845b3 --- /dev/null +++ 
b/packages/node_modules/pouchdb-adapter-websql-core/src/bulkDocs.js @@ -0,0 +1,338 @@ +import { + preprocessAttachments, + isLocalId, + processDocs, + parseDoc +} from 'pouchdb-adapter-utils'; +import { + compactTree +} from 'pouchdb-merge'; +import { + safeJsonParse, + safeJsonStringify +} from 'pouchdb-json'; +import { + MISSING_STUB, + createError +} from 'pouchdb-errors'; + +import { + DOC_STORE, + BY_SEQ_STORE, + ATTACH_STORE, + ATTACH_AND_SEQ_STORE +} from './constants'; + +import { + select, + stringifyDoc, + compactRevs, + websqlError, + escapeBlob +} from './utils'; + +function websqlBulkDocs(dbOpts, req, opts, api, db, websqlChanges, callback) { + var newEdits = opts.new_edits; + var userDocs = req.docs; + + // Parse the docs, give them a sequence number for the result + var docInfos = userDocs.map(function (doc) { + if (doc._id && isLocalId(doc._id)) { + return doc; + } + var newDoc = parseDoc(doc, newEdits, dbOpts); + return newDoc; + }); + + var docInfoErrors = docInfos.filter(function (docInfo) { + return docInfo.error; + }); + if (docInfoErrors.length) { + return callback(docInfoErrors[0]); + } + + var tx; + var results = new Array(docInfos.length); + var fetchedDocs = new Map(); + + var preconditionErrored; + function complete() { + if (preconditionErrored) { + return callback(preconditionErrored); + } + websqlChanges.notify(api._name); + callback(null, results); + } + + function verifyAttachment(digest, callback) { + var sql = 'SELECT count(*) as cnt FROM ' + ATTACH_STORE + + ' WHERE digest=?'; + tx.executeSql(sql, [digest], function (tx, result) { + if (result.rows.item(0).cnt === 0) { + var err = createError(MISSING_STUB, + 'unknown stub attachment with digest ' + + digest); + callback(err); + } else { + callback(); + } + }); + } + + function verifyAttachments(finish) { + var digests = []; + docInfos.forEach(function (docInfo) { + if (docInfo.data && docInfo.data._attachments) { + Object.keys(docInfo.data._attachments).forEach(function (filename) 
{ + var att = docInfo.data._attachments[filename]; + if (att.stub) { + digests.push(att.digest); + } + }); + } + }); + if (!digests.length) { + return finish(); + } + var numDone = 0; + var err; + + function checkDone() { + if (++numDone === digests.length) { + finish(err); + } + } + digests.forEach(function (digest) { + verifyAttachment(digest, function (attErr) { + if (attErr && !err) { + err = attErr; + } + checkDone(); + }); + }); + } + + function writeDoc(docInfo, winningRev, winningRevIsDeleted, newRevIsDeleted, + isUpdate, delta, resultsIdx, callback) { + + function finish() { + var data = docInfo.data; + var deletedInt = newRevIsDeleted ? 1 : 0; + + var id = data._id; + var rev = data._rev; + var json = stringifyDoc(data); + var sql = 'INSERT INTO ' + BY_SEQ_STORE + + ' (doc_id, rev, json, deleted) VALUES (?, ?, ?, ?);'; + var sqlArgs = [id, rev, json, deletedInt]; + + // map seqs to attachment digests, which + // we will need later during compaction + function insertAttachmentMappings(seq, callback) { + var attsAdded = 0; + var attsToAdd = Object.keys(data._attachments || {}); + + if (!attsToAdd.length) { + return callback(); + } + function checkDone() { + if (++attsAdded === attsToAdd.length) { + callback(); + } + return false; // ack handling a constraint error + } + function add(att) { + var sql = 'INSERT INTO ' + ATTACH_AND_SEQ_STORE + + ' (digest, seq) VALUES (?,?)'; + var sqlArgs = [data._attachments[att].digest, seq]; + tx.executeSql(sql, sqlArgs, checkDone, checkDone); + // second callback is for a constaint error, which we ignore + // because this docid/rev has already been associated with + // the digest (e.g. 
when new_edits == false) + } + for (var i = 0; i < attsToAdd.length; i++) { + add(attsToAdd[i]); // do in parallel + } + } + + tx.executeSql(sql, sqlArgs, function (tx, result) { + var seq = result.insertId; + insertAttachmentMappings(seq, function () { + dataWritten(tx, seq); + }); + }, function () { + // constraint error, recover by updating instead (see #1638) + var fetchSql = select('seq', BY_SEQ_STORE, null, + 'doc_id=? AND rev=?'); + tx.executeSql(fetchSql, [id, rev], function (tx, res) { + var seq = res.rows.item(0).seq; + var sql = 'UPDATE ' + BY_SEQ_STORE + + ' SET json=?, deleted=? WHERE doc_id=? AND rev=?;'; + var sqlArgs = [json, deletedInt, id, rev]; + tx.executeSql(sql, sqlArgs, function (tx) { + insertAttachmentMappings(seq, function () { + dataWritten(tx, seq); + }); + }); + }); + return false; // ack that we've handled the error + }); + } + + function collectResults(attachmentErr) { + if (!err) { + if (attachmentErr) { + err = attachmentErr; + callback(err); + } else if (recv === attachments.length) { + finish(); + } + } + } + + var err = null; + var recv = 0; + + docInfo.data._id = docInfo.metadata.id; + docInfo.data._rev = docInfo.metadata.rev; + var attachments = Object.keys(docInfo.data._attachments || {}); + + + if (newRevIsDeleted) { + docInfo.data._deleted = true; + } + + function attachmentSaved(err) { + recv++; + collectResults(err); + } + + attachments.forEach(function (key) { + var att = docInfo.data._attachments[key]; + if (!att.stub) { + var data = att.data; + delete att.data; + att.revpos = parseInt(winningRev, 10); + var digest = att.digest; + saveAttachment(digest, data, attachmentSaved); + } else { + recv++; + collectResults(); + } + }); + + if (!attachments.length) { + finish(); + } + + function dataWritten(tx, seq) { + var id = docInfo.metadata.id; + + var revsToCompact = docInfo.stemmedRevs || []; + if (isUpdate && api.auto_compaction) { + revsToCompact = compactTree(docInfo.metadata).concat(revsToCompact); + } + if 
(revsToCompact.length) { + compactRevs(revsToCompact, id, tx); + } + + docInfo.metadata.seq = seq; + var rev = docInfo.metadata.rev; + delete docInfo.metadata.rev; + + var sql = isUpdate ? + 'UPDATE ' + DOC_STORE + + ' SET json=?, max_seq=?, winningseq=' + + '(SELECT seq FROM ' + BY_SEQ_STORE + + ' WHERE doc_id=' + DOC_STORE + '.id AND rev=?) WHERE id=?' + : 'INSERT INTO ' + DOC_STORE + + ' (id, winningseq, max_seq, json) VALUES (?,?,?,?);'; + var metadataStr = safeJsonStringify(docInfo.metadata); + var params = isUpdate ? + [metadataStr, seq, winningRev, id] : + [id, seq, seq, metadataStr]; + tx.executeSql(sql, params, function () { + results[resultsIdx] = { + ok: true, + id: docInfo.metadata.id, + rev + }; + fetchedDocs.set(id, docInfo.metadata); + callback(); + }); + } + } + + function websqlProcessDocs() { + processDocs(dbOpts.revs_limit, docInfos, api, fetchedDocs, tx, + results, writeDoc, opts); + } + + function fetchExistingDocs(callback) { + if (!docInfos.length) { + return callback(); + } + + var numFetched = 0; + + function checkDone() { + if (++numFetched === docInfos.length) { + callback(); + } + } + + docInfos.forEach(function (docInfo) { + if (docInfo._id && isLocalId(docInfo._id)) { + return checkDone(); // skip local docs + } + var id = docInfo.metadata.id; + tx.executeSql('SELECT json FROM ' + DOC_STORE + + ' WHERE id = ?', [id], function (tx, result) { + if (result.rows.length) { + var metadata = safeJsonParse(result.rows.item(0).json); + fetchedDocs.set(id, metadata); + } + checkDone(); + }); + }); + } + + function saveAttachment(digest, data, callback) { + var sql = 'SELECT digest FROM ' + ATTACH_STORE + ' WHERE digest=?'; + tx.executeSql(sql, [digest], function (tx, result) { + if (result.rows.length) { // attachment already exists + return callback(); + } + // we could just insert before selecting and catch the error, + // but my hunch is that it's cheaper not to serialize the blob + // from JS to C if we don't have to (TODO: confirm this) + 
sql = 'INSERT INTO ' + ATTACH_STORE + + ' (digest, body, escaped) VALUES (?,?,1)'; + tx.executeSql(sql, [digest, escapeBlob(data)], function () { + callback(); + }, function () { + // ignore constaint errors, means it already exists + callback(); + return false; // ack we handled the error + }); + }); + } + + preprocessAttachments(docInfos, 'binary', function (err) { + if (err) { + return callback(err); + } + db.transaction(function (txn) { + tx = txn; + verifyAttachments(function (err) { + if (err) { + preconditionErrored = err; + } else { + fetchExistingDocs(websqlProcessDocs); + } + }); + }, websqlError(callback), complete); + }); +} + +export default websqlBulkDocs; diff --git a/packages/node_modules/pouchdb-adapter-websql-core/src/constants.js b/packages/node_modules/pouchdb-adapter-websql-core/src/constants.js new file mode 100644 index 0000000000..060a053ac1 --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-websql-core/src/constants.js @@ -0,0 +1,29 @@ +function quote(str) { + return "'" + str + "'"; +} + +var ADAPTER_VERSION = 7; // used to manage migrations + +// The object stores created for each database +// DOC_STORE stores the document meta data, its revision history and state +var DOC_STORE = quote('document-store'); +// BY_SEQ_STORE stores a particular version of a document, keyed by its +// sequence id +var BY_SEQ_STORE = quote('by-sequence'); +// Where we store attachments +var ATTACH_STORE = quote('attach-store'); +var LOCAL_STORE = quote('local-store'); +var META_STORE = quote('metadata-store'); +// where we store many-to-many relations between attachment +// digests and seqs +var ATTACH_AND_SEQ_STORE = quote('attach-seq-store'); + +export { + ADAPTER_VERSION as ADAPTER_VERSION, + DOC_STORE as DOC_STORE, + BY_SEQ_STORE as BY_SEQ_STORE, + ATTACH_STORE as ATTACH_STORE, + LOCAL_STORE as LOCAL_STORE, + META_STORE as META_STORE, + ATTACH_AND_SEQ_STORE as ATTACH_AND_SEQ_STORE +}; diff --git 
a/packages/node_modules/pouchdb-adapter-websql-core/src/index.js b/packages/node_modules/pouchdb-adapter-websql-core/src/index.js new file mode 100644 index 0000000000..ed2a9be21e --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-websql-core/src/index.js @@ -0,0 +1,1172 @@ + +import { + clone, + pick, + filterChange, + hasLocalStorage, + changesHandler as Changes, + toPromise, + uuid +} from 'pouchdb-utils'; +import { + isDeleted, + isLocalId +} from 'pouchdb-adapter-utils'; +import { + collectConflicts, + traverseRevTree, + latest as getLatest +} from 'pouchdb-merge'; +import { + safeJsonParse, + safeJsonStringify +} from 'pouchdb-json'; +import { + binaryStringToBlobOrBuffer as binStringToBlob, + btoa +} from 'pouchdb-binary-utils'; + +import parseHexString from './parseHex'; +import websqlBulkDocs from './bulkDocs'; + +import { + MISSING_DOC, + REV_CONFLICT, + createError +} from 'pouchdb-errors'; + +import { + ADAPTER_VERSION, + DOC_STORE, + BY_SEQ_STORE, + ATTACH_STORE, + LOCAL_STORE, + META_STORE, + ATTACH_AND_SEQ_STORE +} from './constants'; + +import { + qMarks, + stringifyDoc, + unstringifyDoc, + select, + compactRevs, + websqlError, + getSize, + unescapeBlob +} from './utils'; + +import openDB from './openDatabase'; + +var websqlChanges = new Changes(); + +function fetchAttachmentsIfNecessary(doc, opts, api, txn, cb) { + var attachments = Object.keys(doc._attachments || {}); + if (!attachments.length) { + return cb && cb(); + } + var numDone = 0; + + function checkDone() { + if (++numDone === attachments.length && cb) { + cb(); + } + } + + function fetchAttachment(doc, att) { + var attObj = doc._attachments[att]; + var attOpts = {binary: opts.binary, ctx: txn}; + api._getAttachment(doc._id, att, attObj, attOpts, function (_, data) { + doc._attachments[att] = Object.assign( + pick(attObj, ['digest', 'content_type']), + { data } + ); + checkDone(); + }); + } + + attachments.forEach(function (att) { + if (opts.attachments && opts.include_docs) { + 
fetchAttachment(doc, att); + } else { + doc._attachments[att].stub = true; + checkDone(); + } + }); +} + +var POUCH_VERSION = 1; + +// these indexes cover the ground for most allDocs queries +var BY_SEQ_STORE_DELETED_INDEX_SQL = + 'CREATE INDEX IF NOT EXISTS \'by-seq-deleted-idx\' ON ' + + BY_SEQ_STORE + ' (seq, deleted)'; +var BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL = + 'CREATE UNIQUE INDEX IF NOT EXISTS \'by-seq-doc-id-rev\' ON ' + + BY_SEQ_STORE + ' (doc_id, rev)'; +var DOC_STORE_WINNINGSEQ_INDEX_SQL = + 'CREATE INDEX IF NOT EXISTS \'doc-winningseq-idx\' ON ' + + DOC_STORE + ' (winningseq)'; +var ATTACH_AND_SEQ_STORE_SEQ_INDEX_SQL = + 'CREATE INDEX IF NOT EXISTS \'attach-seq-seq-idx\' ON ' + + ATTACH_AND_SEQ_STORE + ' (seq)'; +var ATTACH_AND_SEQ_STORE_ATTACH_INDEX_SQL = + 'CREATE UNIQUE INDEX IF NOT EXISTS \'attach-seq-digest-idx\' ON ' + + ATTACH_AND_SEQ_STORE + ' (digest, seq)'; + +var DOC_STORE_AND_BY_SEQ_JOINER = BY_SEQ_STORE + + '.seq = ' + DOC_STORE + '.winningseq'; + +var SELECT_DOCS = BY_SEQ_STORE + '.seq AS seq, ' + + BY_SEQ_STORE + '.deleted AS deleted, ' + + BY_SEQ_STORE + '.json AS data, ' + + BY_SEQ_STORE + '.rev AS rev, ' + + DOC_STORE + '.json AS metadata'; + +function WebSqlPouch(opts, callback) { + var api = this; + var instanceId = null; + var size = getSize(opts); + var idRequests = []; + var encoding; + + api._name = opts.name; + + // extend the options here, because sqlite plugin has a ton of options + // and they are constantly changing, so it's more prudent to allow anything + var websqlOpts = Object.assign({}, opts, { + version: POUCH_VERSION, + description: opts.name, + size + }); + var openDBResult = openDB(websqlOpts); + if (openDBResult.error) { + return websqlError(callback)(openDBResult.error); + } + var db = openDBResult.db; + if (typeof db.readTransaction !== 'function') { + // doesn't exist in sqlite plugin + db.readTransaction = db.transaction; + } + + function dbCreated() { + // note the db name in case the browser upgrades to idb + 
if (hasLocalStorage()) { + window.localStorage['_pouch__websqldb_' + api._name] = true; + } + callback(null, api); + } + + // In this migration, we added the 'deleted' and 'local' columns to the + // by-seq and doc store tables. + // To preserve existing user data, we re-process all the existing JSON + // and add these values. + // Called migration2 because it corresponds to adapter version (db_version) #2 + function runMigration2(tx, callback) { + // index used for the join in the allDocs query + tx.executeSql(DOC_STORE_WINNINGSEQ_INDEX_SQL); + + tx.executeSql('ALTER TABLE ' + BY_SEQ_STORE + + ' ADD COLUMN deleted TINYINT(1) DEFAULT 0', [], function () { + tx.executeSql(BY_SEQ_STORE_DELETED_INDEX_SQL); + tx.executeSql('ALTER TABLE ' + DOC_STORE + + ' ADD COLUMN local TINYINT(1) DEFAULT 0', [], function () { + tx.executeSql('CREATE INDEX IF NOT EXISTS \'doc-store-local-idx\' ON ' + + DOC_STORE + ' (local, id)'); + + var sql = 'SELECT ' + DOC_STORE + '.winningseq AS seq, ' + DOC_STORE + + '.json AS metadata FROM ' + BY_SEQ_STORE + ' JOIN ' + DOC_STORE + + ' ON ' + BY_SEQ_STORE + '.seq = ' + DOC_STORE + '.winningseq'; + + tx.executeSql(sql, [], function (tx, result) { + + var deleted = []; + var local = []; + + for (var i = 0; i < result.rows.length; i++) { + var item = result.rows.item(i); + var seq = item.seq; + var metadata = JSON.parse(item.metadata); + if (isDeleted(metadata)) { + deleted.push(seq); + } + if (isLocalId(metadata.id)) { + local.push(metadata.id); + } + } + tx.executeSql('UPDATE ' + DOC_STORE + 'SET local = 1 WHERE id IN ' + + qMarks(local.length), local, function () { + tx.executeSql('UPDATE ' + BY_SEQ_STORE + + ' SET deleted = 1 WHERE seq IN ' + + qMarks(deleted.length), deleted, callback); + }); + }); + }); + }); + } + + // in this migration, we make all the local docs unversioned + function runMigration3(tx, callback) { + var local = 'CREATE TABLE IF NOT EXISTS ' + LOCAL_STORE + + ' (id UNIQUE, rev, json)'; + tx.executeSql(local, [], function 
() { + var sql = 'SELECT ' + DOC_STORE + '.id AS id, ' + + BY_SEQ_STORE + '.json AS data ' + + 'FROM ' + BY_SEQ_STORE + ' JOIN ' + + DOC_STORE + ' ON ' + BY_SEQ_STORE + '.seq = ' + + DOC_STORE + '.winningseq WHERE local = 1'; + tx.executeSql(sql, [], function (tx, res) { + var rows = []; + for (var i = 0; i < res.rows.length; i++) { + rows.push(res.rows.item(i)); + } + function doNext() { + if (!rows.length) { + return callback(tx); + } + var row = rows.shift(); + var rev = JSON.parse(row.data)._rev; + tx.executeSql('INSERT INTO ' + LOCAL_STORE + + ' (id, rev, json) VALUES (?,?,?)', + [row.id, rev, row.data], function (tx) { + tx.executeSql('DELETE FROM ' + DOC_STORE + ' WHERE id=?', + [row.id], function (tx) { + tx.executeSql('DELETE FROM ' + BY_SEQ_STORE + ' WHERE seq=?', + [row.seq], function () { + doNext(); + }); + }); + }); + } + doNext(); + }); + }); + } + + // in this migration, we remove doc_id_rev and just use rev + function runMigration4(tx, callback) { + + function updateRows(rows) { + function doNext() { + if (!rows.length) { + return callback(tx); + } + var row = rows.shift(); + var doc_id_rev = parseHexString(row.hex, encoding); + var idx = doc_id_rev.lastIndexOf('::'); + var doc_id = doc_id_rev.substring(0, idx); + var rev = doc_id_rev.substring(idx + 2); + var sql = 'UPDATE ' + BY_SEQ_STORE + + ' SET doc_id=?, rev=? 
WHERE doc_id_rev=?'; + tx.executeSql(sql, [doc_id, rev, doc_id_rev], function () { + doNext(); + }); + } + doNext(); + } + + var sql = 'ALTER TABLE ' + BY_SEQ_STORE + ' ADD COLUMN doc_id'; + tx.executeSql(sql, [], function (tx) { + var sql = 'ALTER TABLE ' + BY_SEQ_STORE + ' ADD COLUMN rev'; + tx.executeSql(sql, [], function (tx) { + tx.executeSql(BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL, [], function (tx) { + var sql = 'SELECT hex(doc_id_rev) as hex FROM ' + BY_SEQ_STORE; + tx.executeSql(sql, [], function (tx, res) { + var rows = []; + for (var i = 0; i < res.rows.length; i++) { + rows.push(res.rows.item(i)); + } + updateRows(rows); + }); + }); + }); + }); + } + + // in this migration, we add the attach_and_seq table + // for issue #2818 + function runMigration5(tx, callback) { + + function migrateAttsAndSeqs(tx) { + // need to actually populate the table. this is the expensive part, + // so as an optimization, check first that this database even + // contains attachments + var sql = 'SELECT COUNT(*) AS cnt FROM ' + ATTACH_STORE; + tx.executeSql(sql, [], function (tx, res) { + var count = res.rows.item(0).cnt; + if (!count) { + return callback(tx); + } + + var offset = 0; + var pageSize = 10; + function nextPage() { + var sql = select( + SELECT_DOCS + ', ' + DOC_STORE + '.id AS id', + [DOC_STORE, BY_SEQ_STORE], + DOC_STORE_AND_BY_SEQ_JOINER, + null, + DOC_STORE + '.id ' + ); + sql += ' LIMIT ' + pageSize + ' OFFSET ' + offset; + offset += pageSize; + tx.executeSql(sql, [], function (tx, res) { + if (!res.rows.length) { + return callback(tx); + } + var digestSeqs = {}; + function addDigestSeq(digest, seq) { + // uniq digest/seq pairs, just in case there are dups + var seqs = digestSeqs[digest] = (digestSeqs[digest] || []); + if (seqs.indexOf(seq) === -1) { + seqs.push(seq); + } + } + for (var i = 0; i < res.rows.length; i++) { + var row = res.rows.item(i); + var doc = unstringifyDoc(row.data, row.id, row.rev); + var atts = Object.keys(doc._attachments || {}); + for (var j 
= 0; j < atts.length; j++) { + var att = doc._attachments[atts[j]]; + addDigestSeq(att.digest, row.seq); + } + } + var digestSeqPairs = []; + Object.keys(digestSeqs).forEach(function (digest) { + var seqs = digestSeqs[digest]; + seqs.forEach(function (seq) { + digestSeqPairs.push([digest, seq]); + }); + }); + if (!digestSeqPairs.length) { + return nextPage(); + } + var numDone = 0; + digestSeqPairs.forEach(function (pair) { + var sql = 'INSERT INTO ' + ATTACH_AND_SEQ_STORE + + ' (digest, seq) VALUES (?,?)'; + tx.executeSql(sql, pair, function () { + if (++numDone === digestSeqPairs.length) { + nextPage(); + } + }); + }); + }); + } + nextPage(); + }); + } + + var attachAndRev = 'CREATE TABLE IF NOT EXISTS ' + + ATTACH_AND_SEQ_STORE + ' (digest, seq INTEGER)'; + tx.executeSql(attachAndRev, [], function (tx) { + tx.executeSql( + ATTACH_AND_SEQ_STORE_ATTACH_INDEX_SQL, [], function (tx) { + tx.executeSql( + ATTACH_AND_SEQ_STORE_SEQ_INDEX_SQL, [], + migrateAttsAndSeqs); + }); + }); + } + + // in this migration, we use escapeBlob() and unescapeBlob() + // instead of reading out the binary as HEX, which is slow + function runMigration6(tx, callback) { + var sql = 'ALTER TABLE ' + ATTACH_STORE + + ' ADD COLUMN escaped TINYINT(1) DEFAULT 0'; + tx.executeSql(sql, [], callback); + } + + // issue #3136, in this migration we need a "latest seq" as well + // as the "winning seq" in the doc store + function runMigration7(tx, callback) { + var sql = 'ALTER TABLE ' + DOC_STORE + + ' ADD COLUMN max_seq INTEGER'; + tx.executeSql(sql, [], function (tx) { + var sql = 'UPDATE ' + DOC_STORE + ' SET max_seq=(SELECT MAX(seq) FROM ' + + BY_SEQ_STORE + ' WHERE doc_id=id)'; + tx.executeSql(sql, [], function (tx) { + // add unique index after filling, else we'll get a constraint + // error when we do the ALTER TABLE + var sql = + 'CREATE UNIQUE INDEX IF NOT EXISTS \'doc-max-seq-idx\' ON ' + + DOC_STORE + ' (max_seq)'; + tx.executeSql(sql, [], callback); + }); + }); + } + + function 
checkEncoding(tx, cb) { + // UTF-8 on chrome/android, UTF-16 on safari < 7.1 + tx.executeSql(`SELECT HEX('a') AS hex`, [], function (tx, res) { + var hex = res.rows.item(0).hex; + encoding = hex.length === 2 ? 'UTF-8' : 'UTF-16'; + cb(); + } + ); + } + + function onGetInstanceId() { + while (idRequests.length > 0) { + var idCallback = idRequests.pop(); + idCallback(null, instanceId); + } + } + + function onGetVersion(tx, dbVersion) { + if (dbVersion === 0) { + // initial schema + + var meta = 'CREATE TABLE IF NOT EXISTS ' + META_STORE + + ' (dbid, db_version INTEGER)'; + var attach = 'CREATE TABLE IF NOT EXISTS ' + ATTACH_STORE + + ' (digest UNIQUE, escaped TINYINT(1), body BLOB)'; + var attachAndRev = 'CREATE TABLE IF NOT EXISTS ' + + ATTACH_AND_SEQ_STORE + ' (digest, seq INTEGER)'; + // TODO: migrate winningseq to INTEGER + var doc = 'CREATE TABLE IF NOT EXISTS ' + DOC_STORE + + ' (id unique, json, winningseq, max_seq INTEGER UNIQUE)'; + var seq = 'CREATE TABLE IF NOT EXISTS ' + BY_SEQ_STORE + + ' (seq INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, ' + + 'json, deleted TINYINT(1), doc_id, rev)'; + var local = 'CREATE TABLE IF NOT EXISTS ' + LOCAL_STORE + + ' (id UNIQUE, rev, json)'; + + // creates + tx.executeSql(attach); + tx.executeSql(local); + tx.executeSql(attachAndRev, [], function () { + tx.executeSql(ATTACH_AND_SEQ_STORE_SEQ_INDEX_SQL); + tx.executeSql(ATTACH_AND_SEQ_STORE_ATTACH_INDEX_SQL); + }); + tx.executeSql(doc, [], function () { + tx.executeSql(DOC_STORE_WINNINGSEQ_INDEX_SQL); + tx.executeSql(seq, [], function () { + tx.executeSql(BY_SEQ_STORE_DELETED_INDEX_SQL); + tx.executeSql(BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL); + tx.executeSql(meta, [], function () { + // mark the db version, and new dbid + var initSeq = 'INSERT INTO ' + META_STORE + + ' (db_version, dbid) VALUES (?,?)'; + instanceId = uuid(); + var initSeqArgs = [ADAPTER_VERSION, instanceId]; + tx.executeSql(initSeq, initSeqArgs, function () { + onGetInstanceId(); + }); + }); + }); + }); + } else 
{ // version > 0 + + var setupDone = function () { + var migrated = dbVersion < ADAPTER_VERSION; + if (migrated) { + // update the db version within this transaction + tx.executeSql('UPDATE ' + META_STORE + ' SET db_version = ' + + ADAPTER_VERSION); + } + // notify db.id() callers + var sql = 'SELECT dbid FROM ' + META_STORE; + tx.executeSql(sql, [], function (tx, result) { + instanceId = result.rows.item(0).dbid; + onGetInstanceId(); + }); + }; + + // would love to use promises here, but then websql + // ends the transaction early + var tasks = [ + runMigration2, + runMigration3, + runMigration4, + runMigration5, + runMigration6, + runMigration7, + setupDone + ]; + + // run each migration sequentially + var i = dbVersion; + var nextMigration = function (tx) { + tasks[i - 1](tx, nextMigration); + i++; + }; + nextMigration(tx); + } + } + + function setup() { + db.transaction(function (tx) { + // first check the encoding + checkEncoding(tx, function () { + // then get the version + fetchVersion(tx); + }); + }, websqlError(callback), dbCreated); + } + + function fetchVersion(tx) { + var sql = 'SELECT sql FROM sqlite_master WHERE tbl_name = ' + META_STORE; + tx.executeSql(sql, [], function (tx, result) { + if (!result.rows.length) { + // database hasn't even been created yet (version 0) + onGetVersion(tx, 0); + } else if (!/db_version/.test(result.rows.item(0).sql)) { + // table was created, but without the new db_version column, + // so add it. 
+ tx.executeSql('ALTER TABLE ' + META_STORE + + ' ADD COLUMN db_version INTEGER', [], function () { + // before version 2, this column didn't even exist + onGetVersion(tx, 1); + }); + } else { // column exists, we can safely get it + tx.executeSql('SELECT db_version FROM ' + META_STORE, + [], function (tx, result) { + var dbVersion = result.rows.item(0).db_version; + onGetVersion(tx, dbVersion); + }); + } + }); + } + + setup(); + + function getMaxSeq(tx, callback) { + var sql = 'SELECT MAX(seq) AS seq FROM ' + BY_SEQ_STORE; + tx.executeSql(sql, [], function (tx, res) { + var updateSeq = res.rows.item(0).seq || 0; + callback(updateSeq); + }); + } + + function countDocs(tx, callback) { + // count the total rows + var sql = select( + 'COUNT(' + DOC_STORE + '.id) AS \'num\'', + [DOC_STORE, BY_SEQ_STORE], + DOC_STORE_AND_BY_SEQ_JOINER, + BY_SEQ_STORE + '.deleted=0'); + + tx.executeSql(sql, [], function (tx, result) { + callback(result.rows.item(0).num); + }); + } + + api._remote = false; + api.type = function () { + return 'websql'; + }; + + api._id = toPromise(function (callback) { + callback(null, instanceId); + }); + + api._info = function (callback) { + var seq; + var docCount; + db.readTransaction(function (tx) { + getMaxSeq(tx, function (theSeq) { + seq = theSeq; + }); + countDocs(tx, function (theDocCount) { + docCount = theDocCount; + }); + }, websqlError(callback), function () { + callback(null, { + doc_count: docCount, + update_seq: seq, + websql_encoding: encoding + }); + }); + }; + + api._bulkDocs = function (req, reqOpts, callback) { + websqlBulkDocs(opts, req, reqOpts, api, db, websqlChanges, callback); + }; + + function latest(tx, id, rev, callback, finish) { + var sql = select( + SELECT_DOCS, + [DOC_STORE, BY_SEQ_STORE], + DOC_STORE_AND_BY_SEQ_JOINER, + DOC_STORE + '.id=?'); + var sqlArgs = [id]; + + tx.executeSql(sql, sqlArgs, function (a, results) { + if (!results.rows.length) { + var err = createError(MISSING_DOC, 'missing'); + return finish(err); + } 
+ var item = results.rows.item(0); + var metadata = safeJsonParse(item.metadata); + callback(getLatest(rev, metadata)); + }); + } + + api._get = function (id, opts, callback) { + var doc; + var metadata; + var tx = opts.ctx; + if (!tx) { + return db.readTransaction(function (txn) { + api._get(id, Object.assign({ctx: txn}, opts), callback); + }); + } + + function finish(err) { + callback(err, {doc, metadata, ctx: tx}); + } + + var sql; + var sqlArgs; + + if (!opts.rev) { + sql = select( + SELECT_DOCS, + [DOC_STORE, BY_SEQ_STORE], + DOC_STORE_AND_BY_SEQ_JOINER, + DOC_STORE + '.id=?'); + sqlArgs = [id]; + } else if (opts.latest) { + latest(tx, id, opts.rev, function (latestRev) { + opts.latest = false; + opts.rev = latestRev; + api._get(id, opts, callback); + }, finish); + return; + } else { + sql = select( + SELECT_DOCS, + [DOC_STORE, BY_SEQ_STORE], + DOC_STORE + '.id=' + BY_SEQ_STORE + '.doc_id', + [BY_SEQ_STORE + '.doc_id=?', BY_SEQ_STORE + '.rev=?']); + sqlArgs = [id, opts.rev]; + } + + tx.executeSql(sql, sqlArgs, function (a, results) { + if (!results.rows.length) { + var missingErr = createError(MISSING_DOC, 'missing'); + return finish(missingErr); + } + var item = results.rows.item(0); + metadata = safeJsonParse(item.metadata); + if (item.deleted && !opts.rev) { + var deletedErr = createError(MISSING_DOC, 'deleted'); + return finish(deletedErr); + } + doc = unstringifyDoc(item.data, metadata.id, item.rev); + finish(); + }); + }; + + api._allDocs = function (opts, callback) { + var results = []; + var totalRows; + var updateSeq; + + var start = 'startkey' in opts ? opts.startkey : false; + var end = 'endkey' in opts ? opts.endkey : false; + var key = 'key' in opts ? opts.key : false; + var keys = 'keys' in opts ? opts.keys : false; + var descending = 'descending' in opts ? opts.descending : false; + var limit = 'limit' in opts ? opts.limit : -1; + var offset = 'skip' in opts ? 
opts.skip : 0; + var inclusiveEnd = opts.inclusive_end !== false; + + var sqlArgs = []; + var criteria = []; + var keyChunks = []; + if (keys) { + + var destinctKeys = []; + keys.forEach(function (key) { + if (destinctKeys.indexOf(key) === -1) { + destinctKeys.push(key); + } + }); + + for (var index = 0; index < destinctKeys.length; index += 999) { + var chunk = destinctKeys.slice(index, index + 999); + if (chunk.length > 0) { + keyChunks.push(chunk); + } + } + + } else if (key !== false) { + criteria.push(DOC_STORE + '.id = ?'); + sqlArgs.push(key); + } else if (start !== false || end !== false) { + if (start !== false) { + criteria.push(DOC_STORE + '.id ' + (descending ? '<=' : '>=') + ' ?'); + sqlArgs.push(start); + } + if (end !== false) { + var comparator = descending ? '>' : '<'; + if (inclusiveEnd) { + comparator += '='; + } + criteria.push(DOC_STORE + '.id ' + comparator + ' ?'); + sqlArgs.push(end); + } + if (key !== false) { + criteria.push(DOC_STORE + '.id = ?'); + sqlArgs.push(key); + } + } + + if (!keys) { + // report deleted if keys are specified + criteria.push(BY_SEQ_STORE + '.deleted = 0'); + } + + db.readTransaction(function (tx) { + // count the docs in parallel to other operations + countDocs(tx, function (docCount) { + totalRows = docCount; + }); + + /* istanbul ignore if */ + if (opts.update_seq) { + // get max sequence in parallel to other operations + getMaxSeq(tx, function (theSeq) { + updateSeq = theSeq; + }); + } + + if (limit === 0) { + return; + } + + if (keys) { + + var finishedCount = 0; + var allRows = []; + keyChunks.forEach(function (keyChunk) { + + sqlArgs = []; + criteria = []; + var bindingStr = ""; + keyChunk.forEach(function () { + bindingStr += '?,'; + }); + bindingStr = bindingStr.substring(0, bindingStr.length - 1); // keys is never empty + criteria.push(DOC_STORE + '.id IN (' + bindingStr + ')'); + sqlArgs = sqlArgs.concat(keyChunk); + + var sql = select( + SELECT_DOCS, + [DOC_STORE, BY_SEQ_STORE], + 
DOC_STORE_AND_BY_SEQ_JOINER, + criteria, + DOC_STORE + '.id ' + (descending ? 'DESC' : 'ASC') + ); + sql += ' LIMIT ' + limit + ' OFFSET ' + offset; + tx.executeSql(sql, sqlArgs, function (tx, result) { + finishedCount++; + for (var index = 0; index < result.rows.length; index++) { + allRows.push(result.rows.item(index)); + } + if (finishedCount === keyChunks.length) { + processResult(allRows); + } + + }); + + }); + + + } else { + + // do a single query to fetch the documents + var sql = select( + SELECT_DOCS, + [DOC_STORE, BY_SEQ_STORE], + DOC_STORE_AND_BY_SEQ_JOINER, + criteria, + DOC_STORE + '.id ' + (descending ? 'DESC' : 'ASC') + ); + sql += ' LIMIT ' + limit + ' OFFSET ' + offset; + tx.executeSql(sql, sqlArgs, function (tx, result) { + var rows = []; + for (var index = 0; index < result.rows.length; index++) { + rows.push(result.rows.item(index)); + } + processResult(rows); + }); + + } + + function processResult(rows) { + + for (var i = 0, l = rows.length; i < l; i++) { + var item = rows[i]; + var metadata = safeJsonParse(item.metadata); + var id = metadata.id; + var data = unstringifyDoc(item.data, id, item.rev); + var winningRev = data._rev; + var doc = { + id, + key: id, + value: {rev: winningRev} + }; + if (opts.include_docs) { + doc.doc = data; + doc.doc._rev = winningRev; + if (opts.conflicts) { + var conflicts = collectConflicts(metadata); + if (conflicts.length) { + doc.doc._conflicts = conflicts; + } + } + fetchAttachmentsIfNecessary(doc.doc, opts, api, tx); + } + if (item.deleted) { + if (keys) { + doc.value.deleted = true; + doc.doc = null; + } else { + // probably should not happen + continue; + } + } + if (!keys) { + results.push(doc); + } else { + var index = keys.indexOf(id, index); + do { + results[index] = doc; + index = keys.indexOf(id, index + 1); + } while (index > -1 && index < keys.length); + } + } + if (keys) { + keys.forEach(function (key, index) { + if (!results[index]) { + results[index] = {key, error: 'not_found'}; + } + }); + } + + 
} + + + }, websqlError(callback), function () { + var returnVal = { + total_rows: totalRows, + offset: opts.skip, + rows: results + }; + + /* istanbul ignore if */ + if (opts.update_seq) { + returnVal.update_seq = updateSeq; + } + callback(null, returnVal); + }); + }; + + api._changes = function (opts) { + opts = clone(opts); + + if (opts.continuous) { + var id = api._name + ':' + uuid(); + websqlChanges.addListener(api._name, id, api, opts); + websqlChanges.notify(api._name); + return { + cancel: function () { + websqlChanges.removeListener(api._name, id); + } + }; + } + + var descending = opts.descending; + + // Ignore the `since` parameter when `descending` is true + opts.since = opts.since && !descending ? opts.since : 0; + + var limit = 'limit' in opts ? opts.limit : -1; + if (limit === 0) { + limit = 1; // per CouchDB _changes spec + } + + var results = []; + var numResults = 0; + + function fetchChanges() { + + var selectStmt = + DOC_STORE + '.json AS metadata, ' + + DOC_STORE + '.max_seq AS maxSeq, ' + + BY_SEQ_STORE + '.json AS winningDoc, ' + + BY_SEQ_STORE + '.rev AS winningRev '; + + var from = DOC_STORE + ' JOIN ' + BY_SEQ_STORE; + + var joiner = DOC_STORE + '.id=' + BY_SEQ_STORE + '.doc_id' + + ' AND ' + DOC_STORE + '.winningseq=' + BY_SEQ_STORE + '.seq'; + + var criteria = ['maxSeq > ?']; + var sqlArgs = [opts.since]; + + if (opts.doc_ids) { + criteria.push(DOC_STORE + '.id IN ' + qMarks(opts.doc_ids.length)); + sqlArgs = sqlArgs.concat(opts.doc_ids); + } + + var orderBy = 'maxSeq ' + (descending ? 
'DESC' : 'ASC'); + + var sql = select(selectStmt, from, joiner, criteria, orderBy); + + var filter = filterChange(opts); + if (!opts.view && !opts.filter) { + // we can just limit in the query + sql += ' LIMIT ' + limit; + } + + var lastSeq = opts.since || 0; + db.readTransaction(function (tx) { + tx.executeSql(sql, sqlArgs, function (tx, result) { + function reportChange(change) { + return function () { + opts.onChange(change); + }; + } + for (var i = 0, l = result.rows.length; i < l; i++) { + var item = result.rows.item(i); + var metadata = safeJsonParse(item.metadata); + lastSeq = item.maxSeq; + + var doc = unstringifyDoc(item.winningDoc, metadata.id, + item.winningRev); + var change = opts.processChange(doc, metadata, opts); + change.seq = item.maxSeq; + + var filtered = filter(change); + if (typeof filtered === 'object') { + return opts.complete(filtered); + } + + if (filtered) { + numResults++; + if (opts.return_docs) { + results.push(change); + } + // process the attachment immediately + // for the benefit of live listeners + if (opts.attachments && opts.include_docs) { + fetchAttachmentsIfNecessary(doc, opts, api, tx, + reportChange(change)); + } else { + reportChange(change)(); + } + } + if (numResults === limit) { + break; + } + } + }); + }, websqlError(opts.complete), function () { + if (!opts.continuous) { + opts.complete(null, { + results, + last_seq: lastSeq + }); + } + }); + } + + fetchChanges(); + }; + + api._close = function (callback) { + //WebSQL databases do not need to be closed + callback(); + }; + + api._getAttachment = function (docId, attachId, attachment, opts, callback) { + var res; + var tx = opts.ctx; + var digest = attachment.digest; + var type = attachment.content_type; + var sql = 'SELECT escaped, ' + + 'CASE WHEN escaped = 1 THEN body ELSE HEX(body) END AS body FROM ' + + ATTACH_STORE + ' WHERE digest=?'; + tx.executeSql(sql, [digest], function (tx, result) { + // websql has a bug where \u0000 causes early truncation in strings + // 
and blobs. to work around this, we used to use the hex() function, + // but that's not performant. after migration 6, we remove \u0000 + // and add it back in afterwards + var item = result.rows.item(0); + var data = item.escaped ? unescapeBlob(item.body) : + parseHexString(item.body, encoding); + if (opts.binary) { + res = binStringToBlob(data, type); + } else { + res = btoa(data); + } + callback(null, res); + }); + }; + + api._getRevisionTree = function (docId, callback) { + db.readTransaction(function (tx) { + var sql = 'SELECT json AS metadata FROM ' + DOC_STORE + ' WHERE id = ?'; + tx.executeSql(sql, [docId], function (tx, result) { + if (!result.rows.length) { + callback(createError(MISSING_DOC)); + } else { + var data = safeJsonParse(result.rows.item(0).metadata); + callback(null, data.rev_tree); + } + }); + }); + }; + + api._doCompaction = function (docId, revs, callback) { + if (!revs.length) { + return callback(); + } + db.transaction(function (tx) { + + // update doc store + var sql = 'SELECT json AS metadata FROM ' + DOC_STORE + ' WHERE id = ?'; + tx.executeSql(sql, [docId], function (tx, result) { + var metadata = safeJsonParse(result.rows.item(0).metadata); + traverseRevTree(metadata.rev_tree, function (isLeaf, pos, + revHash, ctx, opts) { + var rev = pos + '-' + revHash; + if (revs.indexOf(rev) !== -1) { + opts.status = 'missing'; + } + }); + + var sql = 'UPDATE ' + DOC_STORE + ' SET json = ? 
WHERE id = ?'; + tx.executeSql(sql, [safeJsonStringify(metadata), docId]); + }); + + compactRevs(revs, docId, tx); + }, websqlError(callback), function () { + callback(); + }); + }; + + api._getLocal = function (id, callback) { + db.readTransaction(function (tx) { + var sql = 'SELECT json, rev FROM ' + LOCAL_STORE + ' WHERE id=?'; + tx.executeSql(sql, [id], function (tx, res) { + if (res.rows.length) { + var item = res.rows.item(0); + var doc = unstringifyDoc(item.json, id, item.rev); + callback(null, doc); + } else { + callback(createError(MISSING_DOC)); + } + }); + }); + }; + + api._putLocal = function (doc, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + delete doc._revisions; // ignore this, trust the rev + var oldRev = doc._rev; + var id = doc._id; + var newRev; + if (!oldRev) { + newRev = doc._rev = '0-1'; + } else { + newRev = doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1); + } + var json = stringifyDoc(doc); + + var ret; + function putLocal(tx) { + var sql; + var values; + if (oldRev) { + sql = 'UPDATE ' + LOCAL_STORE + ' SET rev=?, json=? ' + + 'WHERE id=? 
AND rev=?'; + values = [newRev, json, id, oldRev]; + } else { + sql = 'INSERT INTO ' + LOCAL_STORE + ' (id, rev, json) VALUES (?,?,?)'; + values = [id, newRev, json]; + } + tx.executeSql(sql, values, function (tx, res) { + if (res.rowsAffected) { + ret = {ok: true, id, rev: newRev}; + if (opts.ctx) { // return immediately + callback(null, ret); + } + } else { + callback(createError(REV_CONFLICT)); + } + }, function () { + callback(createError(REV_CONFLICT)); + return false; // ack that we handled the error + }); + } + + if (opts.ctx) { + putLocal(opts.ctx); + } else { + db.transaction(putLocal, websqlError(callback), function () { + if (ret) { + callback(null, ret); + } + }); + } + }; + + api._removeLocal = function (doc, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + var ret; + + function removeLocal(tx) { + var sql = 'DELETE FROM ' + LOCAL_STORE + ' WHERE id=? AND rev=?'; + var params = [doc._id, doc._rev]; + tx.executeSql(sql, params, function (tx, res) { + if (!res.rowsAffected) { + return callback(createError(MISSING_DOC)); + } + ret = {ok: true, id: doc._id, rev: '0-0'}; + if (opts.ctx) { // return immediately + callback(null, ret); + } + }); + } + + if (opts.ctx) { + removeLocal(opts.ctx); + } else { + db.transaction(removeLocal, websqlError(callback), function () { + if (ret) { + callback(null, ret); + } + }); + } + }; + + api._destroy = function (opts, callback) { + websqlChanges.removeAllListeners(api._name); + db.transaction(function (tx) { + var stores = [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE, META_STORE, + LOCAL_STORE, ATTACH_AND_SEQ_STORE]; + stores.forEach(function (store) { + tx.executeSql('DROP TABLE IF EXISTS ' + store, []); + }); + }, websqlError(callback), function () { + if (hasLocalStorage()) { + delete window.localStorage['_pouch__websqldb_' + api._name]; + delete window.localStorage[api._name]; + } + callback(null, {'ok': true}); + }); + }; +} + +export default WebSqlPouch; diff --git 
a/packages/node_modules/pouchdb-adapter-websql-core/src/openDatabase.js b/packages/node_modules/pouchdb-adapter-websql-core/src/openDatabase.js new file mode 100644 index 0000000000..9a94c29eb1 --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-websql-core/src/openDatabase.js @@ -0,0 +1,32 @@ +'use strict'; + + +var cachedDatabases = new Map(); + +// openDatabase passed in through opts (e.g. for node-websql) +function openDatabaseWithOpts(opts) { + return opts.websql(opts.name, opts.version, opts.description, opts.size); +} + +function openDBSafely(opts) { + try { + return { + db: openDatabaseWithOpts(opts) + }; + } catch (err) { + return { + error: err + }; + } +} + +function openDB(opts) { + var cachedResult = cachedDatabases.get(opts.name); + if (!cachedResult) { + cachedResult = openDBSafely(opts); + cachedDatabases.set(opts.name, cachedResult); + } + return cachedResult; +} + +export default openDB; diff --git a/packages/node_modules/pouchdb-adapter-websql-core/src/parseHex.js b/packages/node_modules/pouchdb-adapter-websql-core/src/parseHex.js new file mode 100644 index 0000000000..18440a953b --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-websql-core/src/parseHex.js @@ -0,0 +1,65 @@ +// +// Parsing hex strings. Yeah. +// +// So basically we need this because of a bug in WebSQL: +// https://code.google.com/p/chromium/issues/detail?id=422690 +// https://bugs.webkit.org/show_bug.cgi?id=137637 +// +// UTF-8 and UTF-16 are provided as separate functions +// for meager performance improvements +// + +function decodeUtf8(str) { + return decodeURIComponent(escape(str)); +} + +function hexToInt(charCode) { + // '0'-'9' is 48-57 + // 'A'-'F' is 65-70 + // SQLite will only give us uppercase hex + return charCode < 65 ? 
(charCode - 48) : (charCode - 55); +} + + +// Example: +// pragma encoding=utf8; +// select hex('A'); +// returns '41' +function parseHexUtf8(str, start, end) { + var result = ''; + while (start < end) { + result += String.fromCharCode( + (hexToInt(str.charCodeAt(start++)) << 4) | + hexToInt(str.charCodeAt(start++))); + } + return result; +} + +// Example: +// pragma encoding=utf16; +// select hex('A'); +// returns '4100' +// notice that the 00 comes after the 41 (i.e. it's swizzled) +function parseHexUtf16(str, start, end) { + var result = ''; + while (start < end) { + // UTF-16, so swizzle the bytes + result += String.fromCharCode( + (hexToInt(str.charCodeAt(start + 2)) << 12) | + (hexToInt(str.charCodeAt(start + 3)) << 8) | + (hexToInt(str.charCodeAt(start)) << 4) | + hexToInt(str.charCodeAt(start + 1))); + start += 4; + } + return result; +} + +function parseHexString(str, encoding) { + if (encoding === 'UTF-8') { + return decodeUtf8(parseHexUtf8(str, 0, str.length)); + } else { + return parseHexUtf16(str, 0, str.length); + } +} + +export default parseHexString; diff --git a/packages/node_modules/pouchdb-adapter-websql-core/src/utils.js b/packages/node_modules/pouchdb-adapter-websql-core/src/utils.js new file mode 100644 index 0000000000..582a64a2ac --- /dev/null +++ b/packages/node_modules/pouchdb-adapter-websql-core/src/utils.js @@ -0,0 +1,191 @@ +import { createError, WSQ_ERROR } from 'pouchdb-errors'; +import { guardedConsole } from 'pouchdb-utils'; + +import { + BY_SEQ_STORE, + ATTACH_STORE, + ATTACH_AND_SEQ_STORE +} from './constants'; + +// escapeBlob and unescapeBlob are workarounds for a websql bug: +// https://code.google.com/p/chromium/issues/detail?id=422690 +// https://bugs.webkit.org/show_bug.cgi?id=137637 +// The goal is to never actually insert the \u0000 character +// in the database. 
+function escapeBlob(str) { + /* eslint-disable no-control-regex */ + return str + .replace(/\u0002/g, '\u0002\u0002') + .replace(/\u0001/g, '\u0001\u0002') + .replace(/\u0000/g, '\u0001\u0001'); + /* eslint-enable no-control-regex */ +} + +function unescapeBlob(str) { + /* eslint-disable no-control-regex */ + return str + .replace(/\u0001\u0001/g, '\u0000') + .replace(/\u0001\u0002/g, '\u0001') + .replace(/\u0002\u0002/g, '\u0002'); + /* eslint-enable no-control-regex */ +} + +function stringifyDoc(doc) { + // don't bother storing the id/rev. it uses lots of space, + // in persistent map/reduce especially + delete doc._id; + delete doc._rev; + return JSON.stringify(doc); +} + +function unstringifyDoc(doc, id, rev) { + doc = JSON.parse(doc); + doc._id = id; + doc._rev = rev; + return doc; +} + +// question mark groups IN queries, e.g. 3 -> '(?,?,?)' +function qMarks(num) { + var s = '('; + while (num--) { + s += '?'; + if (num) { + s += ','; + } + } + return s + ')'; +} + +function select(selector, table, joiner, where, orderBy) { + return 'SELECT ' + selector + ' FROM ' + + (typeof table === 'string' ? table : table.join(' JOIN ')) + + (joiner ? (' ON ' + joiner) : '') + + (where ? (' WHERE ' + + (typeof where === 'string' ? where : where.join(' AND '))) : '') + + (orderBy ? 
(' ORDER BY ' + orderBy) : ''); +} + +function compactRevs(revs, docId, tx) { + + if (!revs.length) { + return; + } + + var numDone = 0; + var seqs = []; + + function checkDone() { + if (++numDone === revs.length) { // done + deleteOrphans(); + } + } + + function deleteOrphans() { + // find orphaned attachment digests + + if (!seqs.length) { + return; + } + + var sql = 'SELECT DISTINCT digest AS digest FROM ' + + ATTACH_AND_SEQ_STORE + ' WHERE seq IN ' + qMarks(seqs.length); + + tx.executeSql(sql, seqs, function (tx, res) { + + var digestsToCheck = []; + for (var i = 0; i < res.rows.length; i++) { + digestsToCheck.push(res.rows.item(i).digest); + } + if (!digestsToCheck.length) { + return; + } + + var sql = 'DELETE FROM ' + ATTACH_AND_SEQ_STORE + + ' WHERE seq IN (' + + seqs.map(function () { return '?'; }).join(',') + + ')'; + tx.executeSql(sql, seqs, function (tx) { + + var sql = 'SELECT digest FROM ' + ATTACH_AND_SEQ_STORE + + ' WHERE digest IN (' + + digestsToCheck.map(function () { return '?'; }).join(',') + + ')'; + tx.executeSql(sql, digestsToCheck, function (tx, res) { + var nonOrphanedDigests = new Set(); + for (var i = 0; i < res.rows.length; i++) { + nonOrphanedDigests.add(res.rows.item(i).digest); + } + digestsToCheck.forEach(function (digest) { + if (nonOrphanedDigests.has(digest)) { + return; + } + tx.executeSql( + 'DELETE FROM ' + ATTACH_AND_SEQ_STORE + ' WHERE digest=?', + [digest]); + tx.executeSql( + 'DELETE FROM ' + ATTACH_STORE + ' WHERE digest=?', [digest]); + }); + }); + }); + }); + } + + // update by-seq and attach stores in parallel + revs.forEach(function (rev) { + var sql = 'SELECT seq FROM ' + BY_SEQ_STORE + + ' WHERE doc_id=? 
AND rev=?'; + + tx.executeSql(sql, [docId, rev], function (tx, res) { + if (!res.rows.length) { // already deleted + return checkDone(); + } + var seq = res.rows.item(0).seq; + seqs.push(seq); + + tx.executeSql( + 'DELETE FROM ' + BY_SEQ_STORE + ' WHERE seq=?', [seq], checkDone); + }); + }); +} + +function websqlError(callback) { + return function (event) { + guardedConsole('error', 'WebSQL threw an error', event); + // event may actually be a SQLError object, so report is as such + var errorNameMatch = event && event.constructor.toString() + .match(/function ([^(]+)/); + var errorName = (errorNameMatch && errorNameMatch[1]) || event.type; + var errorReason = event.target || event.message; + callback(createError(WSQ_ERROR, errorReason, errorName)); + }; +} + +function getSize(opts) { + if ('size' in opts) { + // triggers immediate popup in iOS, fixes #2347 + // e.g. 5000001 asks for 5 MB, 10000001 asks for 10 MB, + return opts.size * 1000000; + } + // In iOS, doesn't matter as long as it's <= 5000000. + // Except that if you request too much, our tests fail + // because of the native "do you accept?" popup. + // In Android <=4.3, this value is actually used as an + // honest-to-god ceiling for data, so we need to + // set it to a decently high number. + var isAndroid = typeof navigator !== 'undefined' && + /Android/.test(navigator.userAgent); + return isAndroid ? 5000000 : 1; // in PhantomJS, if you use 0 it will crash +} + +export { + escapeBlob, + unescapeBlob, + stringifyDoc, + unstringifyDoc, + qMarks, + select, + compactRevs, + getSize, + websqlError +}; diff --git a/packages/node_modules/pouchdb-browser/README.md b/packages/node_modules/pouchdb-browser/README.md index 7fda5cf953..83598840f2 100644 --- a/packages/node_modules/pouchdb-browser/README.md +++ b/packages/node_modules/pouchdb-browser/README.md @@ -6,7 +6,7 @@ PouchDB, the browser-only edition. 
A preset representing the PouchDB code that r The `pouchdb-browser` preset contains the version of PouchDB that is designed for the browser. In particular, it ships with the IndexedDB and WebSQL adapters as its default adapters. It also contains the replication, HTTP, and map/reduce plugins. Use this preset if you only want to use PouchDB in the browser, -and don't want to use it in Node.js. (E.g. to avoid installing LevelDB.) +and don't want to use it in Node.js. ### Usage diff --git a/packages/node_modules/pouchdb-errors/src/index.js b/packages/node_modules/pouchdb-errors/src/index.js index 7fe57bd0ff..69ffe95cb5 100644 --- a/packages/node_modules/pouchdb-errors/src/index.js +++ b/packages/node_modules/pouchdb-errors/src/index.js @@ -34,7 +34,6 @@ const NOT_AN_OBJECT = { status:400, name:'bad_request', message:'Document must b const DB_MISSING = { status:404, name:'not_found', message:'Database not found' }; const IDB_ERROR = { status:500, name:'indexed_db_went_bad', message:'unknown' }; const WSQ_ERROR = { status:500, name:'web_sql_went_bad', message:'unknown' }; -const LDB_ERROR = { status:500, name:'levelDB_went_went_bad', message:'unknown' }; const FORBIDDEN = { status:403, name:'forbidden', message:'Forbidden by design doc validate_doc_update function' }; const INVALID_REV = { status:400, name:'bad_request', message:'Invalid rev format' }; const FILE_EXISTS = { status:412, name:'file_exists', message:'The database could not be created, the file already exists.' 
}; @@ -117,7 +116,6 @@ export { NOT_AN_OBJECT, DB_MISSING, WSQ_ERROR, - LDB_ERROR, FORBIDDEN, INVALID_REV, FILE_EXISTS, diff --git a/packages/node_modules/pouchdb-for-coverage/src/errors.js b/packages/node_modules/pouchdb-for-coverage/src/errors.js index 49bd4af439..6fdd493f99 100644 --- a/packages/node_modules/pouchdb-for-coverage/src/errors.js +++ b/packages/node_modules/pouchdb-for-coverage/src/errors.js @@ -16,7 +16,6 @@ import { NOT_AN_OBJECT, DB_MISSING, WSQ_ERROR, - LDB_ERROR, FORBIDDEN, INVALID_REV, FILE_EXISTS, @@ -43,7 +42,6 @@ export default { NOT_AN_OBJECT, DB_MISSING, WSQ_ERROR, - LDB_ERROR, FORBIDDEN, INVALID_REV, FILE_EXISTS, diff --git a/packages/node_modules/pouchdb-node/README.md b/packages/node_modules/pouchdb-node/README.md index 5cc3c02790..508e012735 100644 --- a/packages/node_modules/pouchdb-node/README.md +++ b/packages/node_modules/pouchdb-node/README.md @@ -4,7 +4,7 @@ pouchdb-node PouchDB, the Node-only edition. A preset representing the PouchDB code that runs in Node.js, without any of the code required to run it in the browser. The `pouchdb-node` preset contains the version of PouchDB that is designed for -Node.js. In particular, it uses the LevelDB adapter and doesn't ship with the +Node.js. In particular, it uses the nodesqlite adapter and doesn't ship with the IndexedDB or WebSQL adapters. It also contains the replication, HTTP, and map/reduce plugins. Use this preset if you are only using PouchDB in Node, and not in the browser. 
diff --git a/packages/node_modules/pouchdb-node/src/index.js b/packages/node_modules/pouchdb-node/src/index.js index 4a948bae4d..1e139a10bd 100644 --- a/packages/node_modules/pouchdb-node/src/index.js +++ b/packages/node_modules/pouchdb-node/src/index.js @@ -1,11 +1,11 @@ import PouchDB from 'pouchdb-core'; -import LevelPouch from 'pouchdb-adapter-leveldb'; +import NodeSqlitePouch from 'pouchdb-adapter-nodesqlite'; import HttpPouch from 'pouchdb-adapter-http'; import mapreduce from 'pouchdb-mapreduce'; import replication from 'pouchdb-replication'; -PouchDB.plugin(LevelPouch) +PouchDB.plugin(NodeSqlitePouch) .plugin(HttpPouch) .plugin(mapreduce) .plugin(replication); diff --git a/packages/node_modules/pouchdb/src/plugins/localstorage.js b/packages/node_modules/pouchdb/src/plugins/localstorage.js deleted file mode 100644 index 3f14077324..0000000000 --- a/packages/node_modules/pouchdb/src/plugins/localstorage.js +++ /dev/null @@ -1,12 +0,0 @@ -// this code only runs in the browser, as its own dist/ script - -import LocalStoragePouchPlugin from 'pouchdb-adapter-localstorage'; -import { guardedConsole } from 'pouchdb-utils'; - -if (typeof PouchDB === 'undefined') { - guardedConsole('error', 'localstorage adapter plugin error: ' + - 'Cannot find global "PouchDB" object! ' + - 'Did you remember to include pouchdb.js?'); -} else { - PouchDB.plugin(LocalStoragePouchPlugin); -} diff --git a/packages/node_modules/pouchdb/src/plugins/memory.js b/packages/node_modules/pouchdb/src/plugins/memory.js deleted file mode 100644 index 44ca3f8851..0000000000 --- a/packages/node_modules/pouchdb/src/plugins/memory.js +++ /dev/null @@ -1,12 +0,0 @@ -// this code only runs in the browser, as its own dist/ script - -import MemoryPouchPlugin from 'pouchdb-adapter-memory'; -import { guardedConsole } from 'pouchdb-utils'; - -if (typeof PouchDB === 'undefined') { - guardedConsole('error', 'memory adapter plugin error: ' + - 'Cannot find global "PouchDB" object! 
' + - 'Did you remember to include pouchdb.js?'); -} else { - PouchDB.plugin(MemoryPouchPlugin); -} diff --git a/packages/node_modules/sublevel-pouchdb/LICENSE b/packages/node_modules/sublevel-pouchdb/LICENSE deleted file mode 100644 index f6cd2bc808..0000000000 --- a/packages/node_modules/sublevel-pouchdb/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/node_modules/sublevel-pouchdb/package-lock.json b/packages/node_modules/sublevel-pouchdb/package-lock.json deleted file mode 100644 index ad5a113b91..0000000000 --- a/packages/node_modules/sublevel-pouchdb/package-lock.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "sublevel-pouchdb", - "version": "7.0.0-prerelease", - "lockfileVersion": 1 -} diff --git a/packages/node_modules/sublevel-pouchdb/package.json b/packages/node_modules/sublevel-pouchdb/package.json deleted file mode 100644 index dbc3cfa2eb..0000000000 --- a/packages/node_modules/sublevel-pouchdb/package.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "sublevel-pouchdb", - "version": "7.0.0-prerelease", - "description": "Fork of level-sublevel with ony the subset of the API that PouchDB uses", - "main": "./lib/index.js", - "keywords": [], - "author": "Dale Harvey ", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "https://github.com/pouchdb/pouchdb.git", - "directory": "packages/node_modules/sublevel-pouchdb" - }, - "module": "./src/index.js", - "contributors": [ - { - "name": "Dominic Tarr", - "email": "dominic.tarr@gmail.com" - } - ] -} diff --git a/packages/node_modules/sublevel-pouchdb/src/NotFoundError.js b/packages/node_modules/sublevel-pouchdb/src/NotFoundError.js deleted file mode 100644 index 13036e5e8f..0000000000 --- 
a/packages/node_modules/sublevel-pouchdb/src/NotFoundError.js +++ /dev/null @@ -1,8 +0,0 @@ -class NotFoundError extends Error { - constructor() { - super(); - this.name = 'NotFoundError'; - } -} - -export default NotFoundError; diff --git a/packages/node_modules/sublevel-pouchdb/src/batch.js b/packages/node_modules/sublevel-pouchdb/src/batch.js deleted file mode 100644 index 6dd533675c..0000000000 --- a/packages/node_modules/sublevel-pouchdb/src/batch.js +++ /dev/null @@ -1,37 +0,0 @@ -function addOperation(type, key, value, options) { - var operation = { - type, - key, - value, - options - }; - - if (options && options.prefix) { - operation.prefix = options.prefix; - delete options.prefix; - } - - this._operations.push(operation); - - return this; -} - -function Batch(sdb) { - this._operations = []; - this._sdb = sdb; - - this.put = addOperation.bind(this, 'put'); - this.del = addOperation.bind(this, 'del'); -} - -var B = Batch.prototype; - -B.clear = function () { - this._operations = []; -}; - -B.write = function (cb) { - this._sdb.batch(this._operations, cb); -}; - -export default Batch; diff --git a/packages/node_modules/sublevel-pouchdb/src/index.js b/packages/node_modules/sublevel-pouchdb/src/index.js deleted file mode 100644 index 67f8a2523e..0000000000 --- a/packages/node_modules/sublevel-pouchdb/src/index.js +++ /dev/null @@ -1,13 +0,0 @@ -import nut from './nut'; -import shell from './shell'; -import Codec from 'level-codec'; -import ReadStream from './readStream'; -import precodec from './legacyCodec'; - -var codec = new Codec(); - -function sublevelPouch(db) { - return shell(nut(db, precodec, codec), [], ReadStream, db.options); -} - -export default sublevelPouch; diff --git a/packages/node_modules/sublevel-pouchdb/src/legacyCodec.js b/packages/node_modules/sublevel-pouchdb/src/legacyCodec.js deleted file mode 100644 index b35262a479..0000000000 --- a/packages/node_modules/sublevel-pouchdb/src/legacyCodec.js +++ /dev/null @@ -1,12 +0,0 @@ -export 
default { - encode: function (decodedKey) { - return '\xff' + decodedKey[0] + '\xff' + decodedKey[1]; - }, - decode: function (encodedKeyAsBuffer) { - var str = encodedKeyAsBuffer.toString(); - var idx = str.indexOf('\xff', 1); - return [str.substring(1, idx), str.substring(idx + 1)]; - }, - lowerBound: '\x00', - upperBound: '\xff' -}; diff --git a/packages/node_modules/sublevel-pouchdb/src/nut.js b/packages/node_modules/sublevel-pouchdb/src/nut.js deleted file mode 100644 index 71b5192b27..0000000000 --- a/packages/node_modules/sublevel-pouchdb/src/nut.js +++ /dev/null @@ -1,134 +0,0 @@ -import ltgt from 'ltgt'; - -function isFunction(f) { - return 'function' === typeof f; -} - -function getPrefix(db) { - if (isFunction(db.prefix)) { - return db.prefix(); - } - return db; -} - -function clone(_obj) { - var obj = {}; - for (var k in _obj) { - obj[k] = _obj[k]; - } - return obj; -} - -function nut(db, precodec, codec) { - function encodePrefix(prefix, key, opts1, opts2) { - return precodec.encode([ prefix, codec.encodeKey(key, opts1, opts2 ) ]); - } - - function addEncodings(op, prefix) { - if (prefix && prefix.options) { - op.keyEncoding = - op.keyEncoding || prefix.options.keyEncoding; - op.valueEncoding = - op.valueEncoding || prefix.options.valueEncoding; - } - return op; - } - - db.open(function () { /* no-op */}); - - return { - apply: function (ops, opts, cb) { - opts = opts || {}; - - var batch = []; - var i = -1; - var len = ops.length; - - while (++i < len) { - var op = ops[i]; - addEncodings(op, op.prefix); - op.prefix = getPrefix(op.prefix); - batch.push({ - key: encodePrefix(op.prefix, op.key, opts, op), - value: op.type !== 'del' && codec.encodeValue(op.value, opts, op), - type: op.type - }); - } - db.db.batch(batch, opts, cb); - }, - get: function (key, prefix, opts, cb) { - opts.asBuffer = codec.valueAsBuffer(opts); - return db.db.get( - encodePrefix(prefix, key, opts), - opts, - function (err, value) { - if (err) { - cb(err); - } else { - cb(null, 
codec.decodeValue(value, opts)); - } - } - ); - }, - createDecoder: function (opts) { - return function (key, value) { - return { - key: codec.decodeKey(precodec.decode(key)[1], opts), - value: codec.decodeValue(value, opts) - }; - }; - }, - isClosed: function isClosed() { - return db.isClosed(); - }, - close: function close(cb) { - return db.close(cb); - }, - iterator: function (_opts) { - var opts = clone(_opts || {}); - var prefix = _opts.prefix || []; - - function encodeKey(key) { - return encodePrefix(prefix, key, opts, {}); - } - - ltgt.toLtgt(_opts, opts, encodeKey, precodec.lowerBound, precodec.upperBound); - - // if these legacy values are in the options, remove them - - opts.prefix = null; - - //************************************************ - //hard coded defaults, for now... - //TODO: pull defaults and encoding out of levelup. - opts.keyAsBuffer = opts.valueAsBuffer = false; - //************************************************ - - - //this is vital, otherwise limit: undefined will - //create an empty stream. - /* istanbul ignore next */ - if ('number' !== typeof opts.limit) { - opts.limit = -1; - } - - opts.keyAsBuffer = precodec.buffer; - opts.valueAsBuffer = codec.valueAsBuffer(opts); - - function wrapIterator(iterator) { - return { - next: function (cb) { - return iterator.next(cb); - }, - end: function (cb) { - iterator.end(cb); - } - }; - } - - return wrapIterator(db.db.iterator(opts)); - } - }; -} - -export default nut; diff --git a/packages/node_modules/sublevel-pouchdb/src/pull.js b/packages/node_modules/sublevel-pouchdb/src/pull.js deleted file mode 100644 index fc9d996d74..0000000000 --- a/packages/node_modules/sublevel-pouchdb/src/pull.js +++ /dev/null @@ -1,28 +0,0 @@ -import pull from 'pull-stream'; -// Currently this uses pull streams, -// and not levelup's readstream, but in theory -// I should be able pretty much just drop that in. 
- -function pullReadStream(options, makeData) { - var stream = pull.defer(); - stream.setIterator = function (iterator) { - stream.resolve(function (end, cb) { - if (!end) { - iterator.next(function (err, key, value) { - if (err) { - return cb(err); - } - if (key === undefined || value === undefined) { - return cb(true); - } - cb(null, makeData(key, value)); - }); - } else { - iterator.end(cb); - } - }); - }; - - return stream; -} -export default pullReadStream; diff --git a/packages/node_modules/sublevel-pouchdb/src/readStream.js b/packages/node_modules/sublevel-pouchdb/src/readStream.js deleted file mode 100644 index dca4ff70ef..0000000000 --- a/packages/node_modules/sublevel-pouchdb/src/readStream.js +++ /dev/null @@ -1,115 +0,0 @@ -/* Copyright (c) 2012-2014 LevelUP contributors - * See list at - * MIT License - */ - -// NOTE: we are fixed to readable-stream@1.0.x for now -// for pure Streams2 across Node versions -import ReadableStreamCore from 'readable-stream'; - -var Readable = ReadableStreamCore.Readable; - -function createClass(parent, init) { - let klass = function (...args) { - if (!(this instanceof klass)) { - return new klass(...args); - } - init.apply(this, args); - }; - klass.prototype = Object.create(parent.prototype, { - constructor: { value: klass } - }); - return klass; -} - -class ReadStreamInternal extends Readable { - constructor(options, makeData) { - super({ objectMode: true, highWaterMark: options.highWaterMark }); - this._setup(options, makeData); - } - - _setup(options, makeData) { - super.constructor({ objectMode: true, highWaterMark: options.highWaterMark }); - - // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref - this._waiting = false; - this._options = options; - this._makeData = makeData; - } - - setIterator(it) { - this._iterator = it; - /* istanbul ignore if */ - if (this._destroyed) { - return it.end(function () {}); - } - /* istanbul ignore if */ - if (this._waiting) { - this._waiting 
= false; - return this._read(); - } - return this; - } - - _cleanup(err) { - if (this._destroyed) { - return; - } - - this._destroyed = true; - - var self = this; - /* istanbul ignore if */ - if (err && err.message !== 'iterator has ended') { - self.emit('error', err); - } - - /* istanbul ignore else */ - if (self._iterator) { - self._iterator.end(function () { - self._iterator = null; - self.emit('close'); - }); - } else { - self.emit('close'); - } - } - - destroy() { - this._cleanup(); - } - - _read() { - var self = this; - /* istanbul ignore if */ - if (self._destroyed) { - return; - } - /* istanbul ignore if */ - if (!self._iterator) { - return this._waiting = true; - } - - self._iterator.next(function (err, key, value) { - if (err || (key === undefined && value === undefined)) { - if (!err && !self._destroyed) { - self.push(null); - } - return self._cleanup(err); - } - - - value = self._makeData(key, value); - if (!self._destroyed) { - self.push(value); - } - }); - } -} - -const ReadStream = createClass(ReadStreamInternal, function (options, makeData) { - ReadStreamInternal.prototype._setup.call(this, options, makeData); -}); - -export default ReadStream; - diff --git a/packages/node_modules/sublevel-pouchdb/src/shell.js b/packages/node_modules/sublevel-pouchdb/src/shell.js deleted file mode 100644 index fd7cf2ee0e..0000000000 --- a/packages/node_modules/sublevel-pouchdb/src/shell.js +++ /dev/null @@ -1,131 +0,0 @@ -import events from 'events'; -import NotFoundError from './NotFoundError'; - -var EventEmitter = events.EventEmitter; -var version = "6.5.4"; - -var NOT_FOUND_ERROR = new NotFoundError(); - -var sublevel = function (nut, prefix, createStream, options) { - var emitter = new EventEmitter(); - emitter.sublevels = {}; - emitter.options = options; - - emitter.version = version; - - emitter.methods = {}; - prefix = prefix || []; - - function mergeOpts(opts) { - var o = {}; - var k; - if (options) { - for (k in options) { - if (typeof options[k] !== 
'undefined') { - o[k] = options[k]; - } - } - } - if (opts) { - for (k in opts) { - if (typeof opts[k] !== 'undefined') { - o[k] = opts[k]; - } - } - } - return o; - } - - emitter.put = function (key, value, opts, cb) { - if ('function' === typeof opts) { - cb = opts; - opts = {}; - } - - nut.apply([{ - key, value, - prefix: prefix.slice(), type: 'put' - }], mergeOpts(opts), function (err) { - /* istanbul ignore next */ - if (err) { - return cb(err); - } - emitter.emit('put', key, value); - cb(null); - }); - }; - - emitter.prefix = function () { - return prefix.slice(); - }; - - emitter.batch = function (ops, opts, cb) { - if ('function' === typeof opts) { - cb = opts; - opts = {}; - } - - ops = ops.map(function (op) { - return { - key: op.key, - value: op.value, - prefix: op.prefix || prefix, - keyEncoding: op.keyEncoding, // * - valueEncoding: op.valueEncoding, // * (TODO: encodings on sublevel) - type: op.type - }; - }); - - nut.apply(ops, mergeOpts(opts), function (err) { - /* istanbul ignore next */ - if (err) { - return cb(err); - } - emitter.emit('batch', ops); - cb(null); - }); - }; - - emitter.get = function (key, opts, cb) { - /* istanbul ignore else */ - if ('function' === typeof opts) { - cb = opts; - opts = {}; - } - nut.get(key, prefix, mergeOpts(opts), function (err, value) { - if (err) { - cb(NOT_FOUND_ERROR); - } else { - cb(null, value); - } - }); - }; - - emitter.sublevel = function (name, opts) { - return emitter.sublevels[name] = - emitter.sublevels[name] || sublevel(nut, prefix.concat(name), createStream, mergeOpts(opts)); - }; - - emitter.readStream = emitter.createReadStream = function (opts) { - opts = mergeOpts(opts); - opts.prefix = prefix; - var stream; - var it = nut.iterator(opts); - - stream = createStream(opts, nut.createDecoder(opts)); - stream.setIterator(it); - - return stream; - }; - - emitter.close = function (cb) { - nut.close(cb); - }; - - emitter.isOpen = nut.isOpen; - emitter.isClosed = nut.isClosed; - - return emitter; -}; 
- -export default sublevel; diff --git a/tests/common-utils.js b/tests/common-utils.js index 5d6e2805e0..e92c313796 100644 --- a/tests/common-utils.js +++ b/tests/common-utils.js @@ -41,7 +41,7 @@ commonUtils.plugins = function () { return plugins ? plugins.split(',') : []; }; -var PLUGIN_ADAPTERS = ['indexeddb', 'localstorage', 'memory', 'node-websql']; +var PLUGIN_ADAPTERS = ['indexeddb', 'node-websql', 'nodesqlite']; commonUtils.loadPouchDB = function (opts) { opts = opts || {}; diff --git a/tests/integration/leveldb/lateStyle/000003.log b/tests/integration/leveldb/lateStyle/000003.log deleted file mode 100644 index 33de8cc352..0000000000 Binary files a/tests/integration/leveldb/lateStyle/000003.log and /dev/null differ diff --git a/tests/integration/leveldb/lateStyle/CURRENT b/tests/integration/leveldb/lateStyle/CURRENT deleted file mode 100644 index 1a84852211..0000000000 --- a/tests/integration/leveldb/lateStyle/CURRENT +++ /dev/null @@ -1 +0,0 @@ -MANIFEST-000002 diff --git a/tests/integration/leveldb/lateStyle/LOCK b/tests/integration/leveldb/lateStyle/LOCK deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/lateStyle/LOG b/tests/integration/leveldb/lateStyle/LOG deleted file mode 100644 index b83544f753..0000000000 --- a/tests/integration/leveldb/lateStyle/LOG +++ /dev/null @@ -1 +0,0 @@ -2014/10/18-13:54:18.233480 103b7a000 Delete type=3 #1 diff --git a/tests/integration/leveldb/lateStyle/MANIFEST-000002 b/tests/integration/leveldb/lateStyle/MANIFEST-000002 deleted file mode 100644 index bbbc585686..0000000000 Binary files a/tests/integration/leveldb/lateStyle/MANIFEST-000002 and /dev/null differ diff --git a/tests/integration/leveldb/laterStyle/000003.log b/tests/integration/leveldb/laterStyle/000003.log deleted file mode 100644 index 08a82f0e76..0000000000 Binary files a/tests/integration/leveldb/laterStyle/000003.log and /dev/null differ diff --git a/tests/integration/leveldb/laterStyle/CURRENT 
b/tests/integration/leveldb/laterStyle/CURRENT deleted file mode 100644 index 1a84852211..0000000000 --- a/tests/integration/leveldb/laterStyle/CURRENT +++ /dev/null @@ -1 +0,0 @@ -MANIFEST-000002 diff --git a/tests/integration/leveldb/laterStyle/LOCK b/tests/integration/leveldb/laterStyle/LOCK deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/laterStyle/LOG b/tests/integration/leveldb/laterStyle/LOG deleted file mode 100644 index 80ca43ed7b..0000000000 --- a/tests/integration/leveldb/laterStyle/LOG +++ /dev/null @@ -1 +0,0 @@ -2014/12/13-15:08:05.242031 103b97000 Delete type=3 #1 diff --git a/tests/integration/leveldb/laterStyle/MANIFEST-000002 b/tests/integration/leveldb/laterStyle/MANIFEST-000002 deleted file mode 100644 index bbbc585686..0000000000 Binary files a/tests/integration/leveldb/laterStyle/MANIFEST-000002 and /dev/null differ diff --git a/tests/integration/leveldb/middleStyle/000003.log b/tests/integration/leveldb/middleStyle/000003.log deleted file mode 100644 index 05cc8457c4..0000000000 Binary files a/tests/integration/leveldb/middleStyle/000003.log and /dev/null differ diff --git a/tests/integration/leveldb/middleStyle/CURRENT b/tests/integration/leveldb/middleStyle/CURRENT deleted file mode 100644 index 1a84852211..0000000000 --- a/tests/integration/leveldb/middleStyle/CURRENT +++ /dev/null @@ -1 +0,0 @@ -MANIFEST-000002 diff --git a/tests/integration/leveldb/middleStyle/LOCK b/tests/integration/leveldb/middleStyle/LOCK deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/middleStyle/LOG b/tests/integration/leveldb/middleStyle/LOG deleted file mode 100644 index bc24db1a48..0000000000 --- a/tests/integration/leveldb/middleStyle/LOG +++ /dev/null @@ -1 +0,0 @@ -2014/04/30-13:26:08.586711 106881000 Delete type=3 #1 diff --git a/tests/integration/leveldb/middleStyle/MANIFEST-000002 b/tests/integration/leveldb/middleStyle/MANIFEST-000002 deleted file mode 100644 index 
dbf594eac7..0000000000 Binary files a/tests/integration/leveldb/middleStyle/MANIFEST-000002 and /dev/null differ diff --git a/tests/integration/leveldb/oldStyle.uuid b/tests/integration/leveldb/oldStyle.uuid deleted file mode 100644 index 751fa94191..0000000000 --- a/tests/integration/leveldb/oldStyle.uuid +++ /dev/null @@ -1 +0,0 @@ -F25693E5-4052-4F30-BEAF-9D1E48DD05CF \ No newline at end of file diff --git a/tests/integration/leveldb/oldStyle/attach-binary-store/000003.log b/tests/integration/leveldb/oldStyle/attach-binary-store/000003.log deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/oldStyle/attach-binary-store/CURRENT b/tests/integration/leveldb/oldStyle/attach-binary-store/CURRENT deleted file mode 100644 index 1a84852211..0000000000 --- a/tests/integration/leveldb/oldStyle/attach-binary-store/CURRENT +++ /dev/null @@ -1 +0,0 @@ -MANIFEST-000002 diff --git a/tests/integration/leveldb/oldStyle/attach-binary-store/LOCK b/tests/integration/leveldb/oldStyle/attach-binary-store/LOCK deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/oldStyle/attach-binary-store/LOG b/tests/integration/leveldb/oldStyle/attach-binary-store/LOG deleted file mode 100644 index 5b9c46e1bc..0000000000 --- a/tests/integration/leveldb/oldStyle/attach-binary-store/LOG +++ /dev/null @@ -1 +0,0 @@ -2014/02/21-19:45:02.682927 103b87000 Delete type=3 #1 diff --git a/tests/integration/leveldb/oldStyle/attach-binary-store/MANIFEST-000002 b/tests/integration/leveldb/oldStyle/attach-binary-store/MANIFEST-000002 deleted file mode 100644 index bbbc585686..0000000000 Binary files a/tests/integration/leveldb/oldStyle/attach-binary-store/MANIFEST-000002 and /dev/null differ diff --git a/tests/integration/leveldb/oldStyle/attach-store/000003.log b/tests/integration/leveldb/oldStyle/attach-store/000003.log deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/tests/integration/leveldb/oldStyle/attach-store/CURRENT b/tests/integration/leveldb/oldStyle/attach-store/CURRENT deleted file mode 100644 index 1a84852211..0000000000 --- a/tests/integration/leveldb/oldStyle/attach-store/CURRENT +++ /dev/null @@ -1 +0,0 @@ -MANIFEST-000002 diff --git a/tests/integration/leveldb/oldStyle/attach-store/LOCK b/tests/integration/leveldb/oldStyle/attach-store/LOCK deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/oldStyle/attach-store/LOG b/tests/integration/leveldb/oldStyle/attach-store/LOG deleted file mode 100644 index 2c68122fef..0000000000 --- a/tests/integration/leveldb/oldStyle/attach-store/LOG +++ /dev/null @@ -1 +0,0 @@ -2014/02/21-19:45:02.682637 103a81000 Delete type=3 #1 diff --git a/tests/integration/leveldb/oldStyle/attach-store/MANIFEST-000002 b/tests/integration/leveldb/oldStyle/attach-store/MANIFEST-000002 deleted file mode 100644 index bbbc585686..0000000000 Binary files a/tests/integration/leveldb/oldStyle/attach-store/MANIFEST-000002 and /dev/null differ diff --git a/tests/integration/leveldb/oldStyle/by-sequence/000003.log b/tests/integration/leveldb/oldStyle/by-sequence/000003.log deleted file mode 100644 index 953ab4b4b6..0000000000 Binary files a/tests/integration/leveldb/oldStyle/by-sequence/000003.log and /dev/null differ diff --git a/tests/integration/leveldb/oldStyle/by-sequence/CURRENT b/tests/integration/leveldb/oldStyle/by-sequence/CURRENT deleted file mode 100644 index 1a84852211..0000000000 --- a/tests/integration/leveldb/oldStyle/by-sequence/CURRENT +++ /dev/null @@ -1 +0,0 @@ -MANIFEST-000002 diff --git a/tests/integration/leveldb/oldStyle/by-sequence/LOCK b/tests/integration/leveldb/oldStyle/by-sequence/LOCK deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/oldStyle/by-sequence/LOG b/tests/integration/leveldb/oldStyle/by-sequence/LOG deleted file mode 100644 index 52362b6e28..0000000000 --- 
a/tests/integration/leveldb/oldStyle/by-sequence/LOG +++ /dev/null @@ -1 +0,0 @@ -2014/02/21-19:45:02.682960 103b04000 Delete type=3 #1 diff --git a/tests/integration/leveldb/oldStyle/by-sequence/MANIFEST-000002 b/tests/integration/leveldb/oldStyle/by-sequence/MANIFEST-000002 deleted file mode 100644 index bbbc585686..0000000000 Binary files a/tests/integration/leveldb/oldStyle/by-sequence/MANIFEST-000002 and /dev/null differ diff --git a/tests/integration/leveldb/oldStyle/document-store/000003.log b/tests/integration/leveldb/oldStyle/document-store/000003.log deleted file mode 100644 index c011370ddb..0000000000 Binary files a/tests/integration/leveldb/oldStyle/document-store/000003.log and /dev/null differ diff --git a/tests/integration/leveldb/oldStyle/document-store/CURRENT b/tests/integration/leveldb/oldStyle/document-store/CURRENT deleted file mode 100644 index 1a84852211..0000000000 --- a/tests/integration/leveldb/oldStyle/document-store/CURRENT +++ /dev/null @@ -1 +0,0 @@ -MANIFEST-000002 diff --git a/tests/integration/leveldb/oldStyle/document-store/LOCK b/tests/integration/leveldb/oldStyle/document-store/LOCK deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/integration/leveldb/oldStyle/document-store/LOG b/tests/integration/leveldb/oldStyle/document-store/LOG deleted file mode 100644 index 4833ff5ace..0000000000 --- a/tests/integration/leveldb/oldStyle/document-store/LOG +++ /dev/null @@ -1 +0,0 @@ -2014/02/21-19:45:02.682986 102781000 Delete type=3 #1 diff --git a/tests/integration/leveldb/oldStyle/document-store/MANIFEST-000002 b/tests/integration/leveldb/oldStyle/document-store/MANIFEST-000002 deleted file mode 100644 index bbbc585686..0000000000 Binary files a/tests/integration/leveldb/oldStyle/document-store/MANIFEST-000002 and /dev/null differ diff --git a/tests/integration/leveldown.migration.js b/tests/integration/leveldown.migration.js deleted file mode 100644 index 54f5035ffd..0000000000 --- 
a/tests/integration/leveldown.migration.js +++ /dev/null @@ -1,160 +0,0 @@ -'use strict'; -if (!process.env.LEVEL_ADAPTER && - !process.env.LEVEL_PREFIX && - !process.env.AUTO_COMPACTION && - !process.env.ADAPTERS) { - // these tests don't make sense for anything other than default leveldown - var fs = require('fs'); - var ncp = require('ncp').ncp; - - ncp.limit = 16; - - describe('migration one', function () { - beforeEach(function (done) { - var input = - fs.createReadStream('./tests/integration/leveldb/oldStyle.uuid'); - input.on('end', function () { - ncp('./tests/integration/leveldb/oldStyle', - './tmp/_pouch_oldStyle', done); - }); - input.pipe(fs.createWriteStream('./tmp/_pouch_oldStyle.uuid')); - }); - it('should work', function () { - var db = new PouchDB('oldStyle'); - return db.get('doc').then(function (doc) { - doc.something.should.equal('awesome'); - return db.destroy(); - }); - }); - }); - describe('migration two', function () { - beforeEach(function (done) { - ncp('./tests/integration/leveldb/middleStyle', - './tmp/_pouch_middleStyle', done); - }); - it('should work', function () { - var db = new PouchDB('middleStyle'); - return db.id().then(function (id) { - id.should.equal('8E049E64-784A-3209-8DD6-97C29D7A5868'); - return db.get('_local/foo'); - }).then(function (resp) { - resp.something.should.equal('else'); - return db.allDocs(); - }).then(function (resp) { - resp.total_rows.should.equal(1); - resp.rows[0].id.should.equal('_design/foo'); - return db.destroy(); - }); - }); - }); - - // sanity check to ensure we don't actually need to migrate - // attachments for #2818 - describe('#2818 no migration needed for attachments', function () { - beforeEach(function (done) { - ncp('./tests/integration/leveldb/lateStyle', - './tmp/_pouch_lateStyle', done); - }); - it('should work', function () { - var db = new PouchDB('lateStyle', { auto_compaction: false }); - return db.put({ - _id: 'doc_b', - _attachments: { - 'att.txt': { - data: 'Zm9v', // 'foo' - 
content_type: 'text/plain' - } - } - }).then(function () { - return db.get('doc_b'); - }).then(function (doc) { - return db.remove(doc); - }).then(function () { - return db.compact(); - }).then(function () { - return db.get('doc_a', {attachments: true}); - }).then(function (doc) { - doc._attachments['att.txt'].data.should.equal('Zm9vYmFy'); - doc._attachments['att2.txt'].data.should.equal('Zm9vYmFy'); - doc._attachments['att3.txt'].data.should.equal('Zm9v'); - return db.destroy(); - }); - }); - }); - - // Sanity check for the fix in 3136 that guards against successive - // new_edits to the same rev, ensuring it ignores duplicates. - // - // In the olden days, <=3.2.0, if you bulk-pushed with new_edits=false - // the same rev multiple times, LevelDB would keep incrementing the seq - // and keep writing new revs. This was fixed, but when we do _changes, - // we have to guard against these duplicate seqs for backwards compat. - // - // This test is very similar to the test.bulk_docs.js test: - // 'Testing successive new_edits to the same doc, different content' - - describe('#3136 no migration needed for overwritten revs', function () { - beforeEach(function (done) { - ncp('./tests/integration/leveldb/laterStyle', - './tmp/_pouch_laterStyle', done); - }); - it('should work', function () { - var db = new PouchDB('laterStyle'); - - // basically this a db where I did a very pathological thing: - //var docsA = [{ - // '_id': 'foo', - // '_rev': '1-x', - // 'bar' : 'baz', - // '_revisions': { - // 'start': 1, - // 'ids': ['x'] - // } - //}, { - // '_id' : 'fee', - // '_rev': '1-x', - // '_revisions': { - // 'start': 1, - // 'ids': ['x'] - // } - //}]; - //var docsB = [{ - // '_id': 'foo', - // '_rev': '1-x', - // 'bar' : 'zam', // this update should be rejected - // '_revisions': { - // 'start': 1, - // 'ids': ['x'] - // } - //}]; - // - //db.bulkDocs(docsA, {new_edits: false}); - //db.bulkDocs(docsB, {new_edits: false}); - - - return db.changes({ - include_docs: true, - 
return_docs: true - }).then(function (result) { - // the important thing is that 'zam' is ignored. see - // the other test in test.bulk_docs.js for details - var expected = { - "results": [{ - "id": "fee", - "changes": [{"rev": "1-x"}], - "doc": {"_id": "fee", "_rev": "1-x"}, - "seq": 1 - }, { - "id": "foo", - "changes": [{"rev": "1-x"}], - "doc": {"bar": "baz", "_id": "foo", "_rev": "1-x"}, - "seq": 2 - }], - "last_seq": 2 - }; - result.should.deep.equal(expected); - return db.destroy(); - }); - }); - }); -} diff --git a/tests/integration/test.compaction.js b/tests/integration/test.compaction.js index 1d0334472d..090bd94217 100644 --- a/tests/integration/test.compaction.js +++ b/tests/integration/test.compaction.js @@ -519,7 +519,7 @@ adapters.forEach(function (adapter) { // NO MORE HTTP TESTS AFTER THIS POINT! // // We're testing some functionality specific to the implementation - // of pouch-adapter-(idb|websql|leveldb), skip the tests if + // of pouch-adapter-(idb|websql), skip the tests if // HTTP or if using new adapters (NEXT=1). 
// TODO BEFORE MERGING: Do we still need to block this off for the indexeddb diff --git a/tests/integration/test.defaults.js b/tests/integration/test.defaults.js deleted file mode 100644 index cae0518ba7..0000000000 --- a/tests/integration/test.defaults.js +++ /dev/null @@ -1,199 +0,0 @@ -'use strict'; -if (!process.env.LEVEL_ADAPTER && - !process.env.LEVEL_PREFIX && - !process.env.AUTO_COMPACTION && - !process.env.ADAPTERS) { - // these tests don't make sense for anything other than default leveldown - var path = require('path'); - const { mkdirSync } = require('fs'); - var rimraf = require('rimraf'); - - describe('defaults', function () { - - beforeEach(function () { - return new PouchDB('mydb').destroy().then(function () { - return new PouchDB('mydb', {db: require('memdown')}).destroy(); - }); - }); - - afterEach(function (done) { - rimraf.sync('./tmp/_pouch_.'); - rimraf.sync('./tmp/path'); - done(); - }); - - it('should allow prefixes', function () { - var prefix = './tmp/path/to/db/1/'; - var dir = path.join(prefix, '/tmp/'); - var dir2 = path.join('./tmp/_pouch_./', prefix); - var dir3 = path.join(dir2, './tmp/_pouch_mydb'); - mkdirSync(dir, { recursive:true }); - mkdirSync(dir2, { recursive:true }); - mkdirSync(dir3, { recursive:true }); - - var db = new PouchDB('mydb', {prefix}); - return db.info().then(function (info1) { - info1.db_name.should.equal('mydb'); - return db.destroy(); - }); - }); - - it('Defaults leaks eventEmitters', function () { - PouchDB.defaults({db: require('memdown') }); - PouchDB.defaults({db: require('memdown') }); - PouchDB.defaults({db: require('memdown') }); - PouchDB.defaults({db: require('memdown') }); - }); - - it('should allow us to set a prefix by default', function () { - var prefix = './tmp/path/to/db/2/'; - var dir = path.join(prefix, '/tmp/'); - var dir2 = path.join('./tmp/_pouch_./', prefix); - var dir3 = path.join(dir2, './tmp/_pouch_mydb'); - mkdirSync(dir, { recursive:true }); - mkdirSync(dir2, { recursive:true }); - 
mkdirSync(dir3, { recursive:true }); - - var CustomPouch = PouchDB.defaults({ - prefix - }); - var db = CustomPouch({name: 'mydb'}); - return db.info().then(function (info1) { - info1.db_name.should.equal('mydb'); - return db.destroy(); - }); - }); - - it('should allow us to use memdown', function () { - var opts = { name: 'mydb', db: require('memdown') }; - var db = new PouchDB(opts); - return db.put({_id: 'foo'}).then(function () { - var otherDB = new PouchDB('mydb'); - return db.info().then(function (info1) { - return otherDB.info().then(function (info2) { - info1.doc_count.should.not.equal(info2.doc_count); - return otherDB.destroy(); - }).then(function () { - return db.destroy(); - }); - }); - }); - }); - - it('should allow us to destroy memdown', function () { - var opts = {db: require('memdown') }; - var db = new PouchDB('mydb', opts); - return db.put({_id: 'foo'}).then(function () { - var otherDB = new PouchDB('mydb', opts); - return db.info().then(function (info1) { - return otherDB.info().then(function (info2) { - info1.doc_count.should.equal(info2.doc_count); - return otherDB.destroy(); - }).then(function () { - var db3 = new PouchDB('mydb', opts); - return db3.info().then(function (info) { - info.doc_count.should.equal(0); - return db3.destroy(); - }); - }); - }); - }); - }); - - it('should allow us to use memdown by default', function () { - var CustomPouch = PouchDB.defaults({db: require('memdown')}); - var db = new CustomPouch('mydb'); - return db.put({_id: 'foo'}).then(function () { - var otherDB = new PouchDB('mydb'); - return db.info().then(function (info1) { - return otherDB.info().then(function (info2) { - info1.doc_count.should.not.equal(info2.doc_count); - return otherDB.destroy(); - }).then(function () { - return db.destroy(); - }); - }); - }); - }); - - - it('should inform us when using memdown', function () { - var opts = { name: 'mydb', db: require('memdown') }; - var db = new PouchDB(opts); - return db.info().then(function (info) { - 
info.backend_adapter.should.equal('MemDOWN'); - }); - }); - - it('constructor emits destroyed when using defaults', function () { - var CustomPouch = PouchDB.defaults({db: require('memdown')}); - - var db = new CustomPouch('mydb'); - return new Promise(function (resolve) { - CustomPouch.once('destroyed', function (name) { - name.should.equal('mydb'); - resolve(); - }); - db.destroy(); - }); - }); - - it('db emits destroyed when using defaults', function () { - var CustomPouch = PouchDB.defaults({db: require('memdown')}); - - var db = new CustomPouch('mydb'); - return new Promise(function (resolve) { - db.once('destroyed', resolve); - db.destroy(); - }); - }); - - it('constructor emits creation event', function (done) { - var CustomPouch = PouchDB.defaults({db: require('memdown')}); - - CustomPouch.once('created', function (name) { - name.should.equal('mydb', 'should be same thing'); - done(); - }); - new PouchDB('mydb'); - }); - - // somewhat odd behavior (CustomPouch constructor always mirrors PouchDB), - // but better to test it explicitly - it('PouchDB emits destroyed when using defaults', function () { - var CustomPouch = PouchDB.defaults({db: require('memdown')}); - - var db = new CustomPouch('mydb'); - return new Promise(function (resolve) { - PouchDB.once('destroyed', function (name) { - name.should.equal('mydb'); - resolve(); - }); - db.destroy(); - }); - }); - - // somewhat odd behavior (CustomPouch constructor always mirrors PouchDB), - // but better to test it explicitly - it('PouchDB emits created when using defaults', function (done) { - var CustomPouch = PouchDB.defaults({db: require('memdown')}); - - PouchDB.once('created', function (name) { - name.should.equal('mydb', 'should be same thing'); - done(); - }); - new CustomPouch('mydb'); - }); - - it('should be transitive (#5922)', function () { - var CustomPouch = PouchDB - .defaults({db: require('memdown')}) - .defaults({}); - - var db = new CustomPouch('mydb'); - return db.info().then(function 
(info) { - info.backend_adapter.should.equal('MemDOWN'); - }); - }); - }); -} diff --git a/tests/integration/test.failures.js b/tests/integration/test.failures.js deleted file mode 100644 index 4717df8866..0000000000 --- a/tests/integration/test.failures.js +++ /dev/null @@ -1,85 +0,0 @@ -'use strict'; - -if (!process.env.LEVEL_ADAPTER && - !process.env.LEVEL_PREFIX && - !process.env.AUTO_COMPACTION && - !process.env.ADAPTERS) { - // these tests don't make sense for anything other than default leveldown - - describe('test.failures.js', function () { - - describe('invalid path', function () { - - var invalidPath = 'C:\\/path/to/thing/that/doesnt/exist\\with\\backslashes\\too'; - - it('fails gracefully in first API call', function () { - var db = new PouchDB(invalidPath); - return db.info().then(function () { - throw new Error('expected an error here'); - }, function (err) { - should.exist(err); - }); - }); - - it('fails gracefully in first changes() call', function () { - var db = new PouchDB(invalidPath); - return db.changes().then(function () { - throw new Error('expected an error here'); - }, function (err) { - should.exist(err); - }); - }); - - it('fails for all API calls', function () { - var db = new PouchDB(invalidPath); - - function expectError(promise) { - return promise.then(function () { - throw new Error('expected an error here'); - }, function (err) { - should.exist(err); - }); - } - - return expectError(db.changes()).then(function () { - return expectError(db.info()); - }).then(function () { - return expectError(db.get('foo')); - }); - }); - - }); - - describe('error stack', function () { - var dbs = {}; - - beforeEach(function () { - dbs.name = testUtils.adapterUrl('local', 'testdb'); - }); - - afterEach(function (done) { - testUtils.cleanup([dbs.name], done); - }); - - it('INVALID_ID error stack', function (done) { - var db = new PouchDB(dbs.name); - db.get(1234, function (err) { - try { - err.stack.should.be.a('string'); - 
err.status.should.equal(testUtils.errors.INVALID_ID.status, - 'correct error status returned'); - err.name.should.equal(testUtils.errors.INVALID_ID.name, - 'correct error name returned'); - err.message.should.equal(testUtils.errors.INVALID_ID.message, - 'correct error message returned'); - done(); - } catch (error) { - done(error); - } - }); - }); - }); - - }); - -} diff --git a/tests/integration/test.issue915.js b/tests/integration/test.issue915.js deleted file mode 100644 index b97633d32a..0000000000 --- a/tests/integration/test.issue915.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; -if (!process.env.LEVEL_ADAPTER && - !process.env.LEVEL_PREFIX && - !process.env.AUTO_COMPACTION && - !process.env.ADAPTERS) { - // these tests don't make sense for anything other than default leveldown - var fs = require('fs'); - describe('test.issue915.js', function () { - afterEach(function (done) { - fs.unlink('./tmp/_pouch_veryimportantfiles/something', function () { - fs.rmdir('./tmp/_pouch_veryimportantfiles/', function () { - done(); - }); - }); - }); - it('Put a file in the db, then destroy it', function (done) { - var db = new PouchDB('veryimportantfiles'); - fs.writeFile('./tmp/_pouch_veryimportantfiles/something', - Buffer.from('lalala', 'utf8'), function () { - db.destroy(function (err) { - if (err) { - return done(err); - } - fs.readFile('./tmp/_pouch_veryimportantfiles/something', - {encoding: 'utf8'}, function (err, resp) { - if (err) { - return done(err); - } - resp.should.equal('lalala', - './tmp/veryimportantfiles/something was not removed'); - done(); - }); - }); - }); - }); - }); -} diff --git a/tests/integration/test.prefix.js b/tests/integration/test.prefix.js index ec90b10636..e4cd0f8f7d 100644 --- a/tests/integration/test.prefix.js +++ b/tests/integration/test.prefix.js @@ -32,37 +32,3 @@ describe('test.prefix.js', function () { }); }); - -// This is also tested in test.defaults.js, however I wanted to cover -// the different use cases of prefix in here -if 
(typeof process !== 'undefined' && - !process.env.LEVEL_ADAPTER && - !process.env.LEVEL_PREFIX && - !process.env.ADAPTERS && - // fails on windows with EBUSY - "resource busy or locked", not worth fixing - require('os').platform() !== 'win32') { - - var rimraf = require('rimraf'); - var fs = require('fs'); - - describe('node test.prefix.js', function () { - - it('Test path prefix', function () { - - var prefix = './tmp/testfolder/'; - fs.mkdirSync(prefix, { recursive:true }); - var CustomPouch = PouchDB.defaults({prefix}); - - var db = new CustomPouch('testdb'); - - return db.info().then(function () { - // This will throw if the folder does not exist - fs.lstatSync(prefix + 'testdb'); - rimraf.sync('./tmp/testfolder'); - }); - - }); - - }); - -} diff --git a/tests/integration/test.viewadapter.js b/tests/integration/test.viewadapter.js index e85ce47e0f..19f1c142c7 100644 --- a/tests/integration/test.viewadapter.js +++ b/tests/integration/test.viewadapter.js @@ -55,7 +55,7 @@ viewAdapters.forEach(viewAdapter => { return done(); } - if (db.adapter !== 'leveldb' && db.adapter !== 'idb') { + if (db.adapter !== 'idb') { return done(); } @@ -109,7 +109,7 @@ viewAdapters.forEach(viewAdapter => { it('Create pouch with no view adapters', function (done) { const db = new PouchDB(dbs.name); - if (db.adapter !== 'leveldb' && db.adapter !== 'idb') { + if (db.adapter !== 'idb') { return done(); } diff --git a/tests/mapreduce/test.persisted.js b/tests/mapreduce/test.persisted.js index 6fa37690ff..3ff9299818 100644 --- a/tests/mapreduce/test.persisted.js +++ b/tests/mapreduce/test.persisted.js @@ -655,14 +655,14 @@ describe('test.persisted.js', function () { var isNode = typeof window === 'undefined'; if (dbType === 'local' && isNode) { - it('#239 test memdown db', function () { + it('#239 test db', function () { var destroyedDBs = []; PouchDB.on('destroyed', function (db) { destroyedDBs.push(db); }); // make sure prefixed DBs are tied to regular DBs - var db = new PouchDB(dbName, 
{db: require('memdown')}); + var db = new PouchDB(dbName, 'nodesqlite'); return testUtils.fin(createView(db, { map: function (doc) { emit(doc.name); diff --git a/tests/memleak/test.memleak.js b/tests/memleak/test.memleak.js index 67db7014cb..3eb91a421b 100644 --- a/tests/memleak/test.memleak.js +++ b/tests/memleak/test.memleak.js @@ -428,31 +428,3 @@ describe('test.memleak.js -- http adapter', function () { Runner(measure,Run); }); }); - -describe('test.memleak.js -- leveldown adapter', function () { - - it('Test basic memory leak in PouchDB leveldown adapter', function (next) { - this.timeout(360*1000); - - var opts = { - dump_snapshots: default_opts.dump_snapshots, - max_growth: 33000, - max_percent: 1, - runs: 2000 - }; - - var measure = new MeasureHeap(next,opts,'level'); - - function Run() { - var db = new PouchDB('goodluck'); - function Finally() { return db.close(); } - return db.info() - .then( Finally, Finally ) - .then(function () { - return sleep(20); - }); - } - - Runner(measure,Run); - }); -}); diff --git a/tests/unit/test.checkpointer.js b/tests/unit/test.checkpointer.js index d1a826af1f..380ef08890 100644 --- a/tests/unit/test.checkpointer.js +++ b/tests/unit/test.checkpointer.js @@ -4,11 +4,10 @@ require("chai").should(); var PouchDB = require("../../packages/node_modules/pouchdb-for-coverage"); var Checkpointer = require("../../packages/node_modules/pouchdb-checkpointer"); -var memdown = require("memdown"); var genReplicationId = PouchDB.utils.generateReplicationId; -var sourceDb = new PouchDB({ name: "local_test_db", db: memdown }); -var targetDb = new PouchDB({ name: "target_test_db", db: memdown }); +var sourceDb = new PouchDB({ name: "local_test_db" }); +var targetDb = new PouchDB({ name: "target_test_db" }); describe("test.checkpointer.js", () => { it("create checkpointer instance", async () => { diff --git a/tests/unit/test.gen-replication-id.js b/tests/unit/test.gen-replication-id.js index 3ce677f810..0968b4daad 100644 --- 
a/tests/unit/test.gen-replication-id.js +++ b/tests/unit/test.gen-replication-id.js @@ -1,10 +1,9 @@ 'use strict'; -var memdown = require('memdown'); var PouchDB = require('../../packages/node_modules/pouchdb-for-coverage'); var genReplicationId = PouchDB.utils.generateReplicationId; -var sourceDb = new PouchDB({name: 'local_db', db: memdown}); -var targetDb = new PouchDB({name: 'target_db', db: memdown}); +var sourceDb = new PouchDB({name: 'local_db'}); +var targetDb = new PouchDB({name: 'target_db'}); require('chai').should(); diff --git a/tests/unit/test.memory-adapter.js b/tests/unit/test.memory-adapter.js deleted file mode 100644 index 927deafa7b..0000000000 --- a/tests/unit/test.memory-adapter.js +++ /dev/null @@ -1,79 +0,0 @@ -var PouchDB = require('../../packages/node_modules/pouchdb-for-coverage'); -var memoryAdapter = require('../../packages/node_modules/pouchdb-adapter-memory'); -PouchDB.plugin(memoryAdapter); - -describe('test.memory-adapter.js', () => { - it('Race condition initially discovered with PouchDB in-memory-adapter 7.3.0', async () => { - const func1 = async () => { - const pouch1 = new PouchDB('test-db', { - adapter: 'memory' - }); - const docId = 'func1doc1'; - - // insert - await pouch1.bulkDocs({ - docs: [{ - _id: docId, - value: 1, - _rev: '1-51b2fae5721cc4d3cf7392f19e6cc118' - }] - }, { - new_edits: false - }); - - // update - let getDocs = await pouch1.bulkGet({ - docs: [{id: docId}], - revs: true, - latest: true - }); - const useRevs = (getDocs). 
- results[0].docs[0].ok._revisions; - useRevs.start = useRevs.start + 1; - useRevs.ids.unshift('a723631364fbfa906c5ffa8203ac9725'); - - await pouch1.bulkDocs({ - docs: [{ - _id: docId, - value: 2, - _rev: '2-a723631364fbfa906c5ffa8203ac9725', - _revisions: useRevs - }] - }, { - new_edits: false - }); - - // delete - getDocs = await pouch1.bulkGet({ - docs: [{id: docId}], - revs: true, - latest: true - }); - - // same via .get - await pouch1.get(docId); - // if this is switched to pouch1.destroy(); ... this test will pass. - pouch1.close(); - }; - - const func2 = async () => { - const pouch2 = new PouchDB( - 'test-db-2', { - adapter: 'memory', - }); - - await pouch2.createIndex({ - index: { - fields: ['foo'] - } - }); - pouch2.destroy(); - }; - - // func1 succeeds when run alone. - // func2 succeeds when run alone. - // As of PouchDB 7.3.0, when running these functions in parallel, there is a race condition where func2 gets - // impacted by func1. The result: func2 will hang and the test will timeout. - await Promise.all([func1(), func2()]); - }); -});